def _parseRecordM(self, line, event, pick):
    """
    Parses the 'surface wave record' M.

    Appends an :class:`~obspy.core.event.Amplitude` (vertical-component
    surface-wave amplitude) and a Ms :class:`StationMagnitude` to
    ``event``, both tied to ``pick``.

    :param line: fixed-width record line to parse
    :param event: event object being built
    :param pick: pick the amplitude refers to
    """
    # unused: Z_comp = line[7]
    Z_period = self._float(line[9:13])
    # note: according to the format documentation,
    # column 20 should be blank. However, it seems that
    # Z_amplitude includes that column
    Z_amplitude = self._float(line[13:21])  # micrometers
    # TODO: N_comp and E_comp seems to be never there
    Ms_mag = self._float(line[53:56])
    # unused: Ms_usage_flag = line[56]

    evid = event.resource_id.id.split('/')[-1]
    station_string = \
        pick.waveform_id.getSEEDString() \
        .replace(' ', '-').replace('.', '_').lower()
    amplitude = None
    if Z_amplitude is not None:
        amplitude = Amplitude()
        prefix = '/'.join((res_id_prefix, 'amp', evid, station_string))
        amplitude.resource_id = ResourceIdentifier(prefix=prefix)
        # convert from micrometers to meters
        amplitude.generic_amplitude = Z_amplitude * 1E-6
        amplitude.unit = 'm'
        amplitude.period = Z_period
        amplitude.type = 'AS'
        amplitude.magnitude_hint = 'Ms'
        amplitude.pick_id = pick.resource_id
        event.amplitudes.append(amplitude)
    # BUG FIX: the original tested `line[49:52] is not None`, but a
    # string slice is never None, so the check was always true and a
    # StationMagnitude with mag=None was created for blank records.
    # Gate on the parsed magnitude value instead.
    if Ms_mag is not None:
        station_magnitude = StationMagnitude()
        # NOTE: 'stationmagntiude' typo kept as-is so generated
        # resource id strings stay stable for existing consumers.
        prefix = '/'.join(
            (res_id_prefix, 'stationmagntiude', evid, station_string))
        station_magnitude.resource_id = ResourceIdentifier(prefix=prefix)
        station_magnitude.origin_id = event.origins[0].resource_id
        station_magnitude.mag = Ms_mag
        station_magnitude.station_magnitude_type = 'Ms'
        if amplitude is not None:
            station_magnitude.amplitude_id = amplitude.resource_id
        event.station_magnitudes.append(station_magnitude)
def _parseRecordP(self, line, event):
    """
    Parses the 'primary phase record' P.

    The primary phase is the first phase of the reading, regardless
    its type.  Creates a pick (and, when an mb amplitude is present,
    an amplitude and a station magnitude) plus an arrival attached to
    the event's first origin.

    :param line: fixed-width record line to parse
    :param event: event object being built
    :returns: the created ``(pick, arrival)`` pair
    """
    station = line[2:7].strip()
    phase = line[7:15]
    arrival_time = line[15:24]
    residual = self._float(line[25:30])
    # unused: residual_flag = line[30]
    distance = self._float(line[32:38])  # degrees
    azimuth = self._float(line[39:44])
    # BUG FIX: the original `azimuth % -360 + 180` yields values in
    # (-180, 180], i.e. possibly *negative* back azimuths (invalid in
    # QuakeML, which expects [0, 360)), and raised TypeError when the
    # azimuth field was blank.  Use the standard normalization and a
    # None guard.
    if azimuth is not None:
        backazimuth = round((azimuth + 180) % 360, 1)
    else:
        backazimuth = None
    mb_period = self._float(line[44:48])
    mb_amplitude = self._float(line[48:55])  # nanometers
    mb_magnitude = self._float(line[56:59])
    # unused: mb_usage_flag = line[59]
    origin = event.origins[0]
    evid = event.resource_id.id.split('/')[-1]
    waveform_id = WaveformStreamID()
    waveform_id.station_code = station
    # network_code is required for QuakeML validation
    waveform_id.network_code = ' '
    station_string = \
        waveform_id.getSEEDString() \
        .replace(' ', '-').replace('.', '_').lower()
    prefix = '/'.join(
        (res_id_prefix, 'waveformstream', evid, station_string))
    waveform_id.resource_uri = ResourceIdentifier(prefix=prefix)
    pick = Pick()
    prefix = '/'.join((res_id_prefix, 'pick', evid, station_string))
    pick.resource_id = ResourceIdentifier(prefix=prefix)
    date = origin.time.strftime('%Y%m%d')
    pick.time = UTCDateTime(date + arrival_time)
    # Check if pick is on the next day:
    if pick.time < origin.time:
        pick.time += timedelta(days=1)
    pick.waveform_id = waveform_id
    pick.backazimuth = backazimuth
    # Leading onset-quality character (e/i/q) is stripped off the
    # phase code and stored separately.
    onset = phase[0]
    if onset == 'e':
        pick.onset = 'emergent'
        phase = phase[1:]
    elif onset == 'i':
        pick.onset = 'impulsive'
        phase = phase[1:]
    elif onset == 'q':
        pick.onset = 'questionable'
        phase = phase[1:]
    pick.phase_hint = phase.strip()
    event.picks.append(pick)
    if mb_amplitude is not None:
        amplitude = Amplitude()
        prefix = '/'.join((res_id_prefix, 'amp', evid, station_string))
        amplitude.resource_id = ResourceIdentifier(prefix=prefix)
        # convert from nanometers to meters
        amplitude.generic_amplitude = mb_amplitude * 1E-9
        amplitude.unit = 'm'
        amplitude.period = mb_period
        amplitude.type = 'AB'
        amplitude.magnitude_hint = 'Mb'
        amplitude.pick_id = pick.resource_id
        amplitude.waveform_id = pick.waveform_id
        event.amplitudes.append(amplitude)
        station_magnitude = StationMagnitude()
        # NOTE: 'stationmagntiude' typo kept as-is so generated
        # resource id strings stay stable for existing consumers.
        prefix = '/'.join(
            (res_id_prefix, 'stationmagntiude', evid, station_string))
        station_magnitude.resource_id = ResourceIdentifier(prefix=prefix)
        station_magnitude.origin_id = origin.resource_id
        station_magnitude.mag = mb_magnitude
        # station_magnitude.mag_errors['uncertainty'] = 0.0
        station_magnitude.station_magnitude_type = 'Mb'
        station_magnitude.amplitude_id = amplitude.resource_id
        station_magnitude.waveform_id = pick.waveform_id
        res_id = '/'.join(
            (res_id_prefix, 'magnitude/generic/body_wave_magnitude'))
        station_magnitude.method_id = ResourceIdentifier(id=res_id)
        event.station_magnitudes.append(station_magnitude)
    arrival = Arrival()
    prefix = '/'.join((res_id_prefix, 'arrival', evid, station_string))
    arrival.resource_id = ResourceIdentifier(prefix=prefix)
    arrival.pick_id = pick.resource_id
    arrival.phase = pick.phase_hint
    arrival.azimuth = azimuth
    arrival.distance = distance
    arrival.time_residual = residual
    res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
    arrival.earth_model_id = ResourceIdentifier(id=res_id)
    origin.arrivals.append(arrival)
    origin.quality.minimum_distance = min(
        d for d in (arrival.distance, origin.quality.minimum_distance)
        if d is not None)
    # BUG FIX: the original computed the maximum from
    # `max(arrival.distance, origin.quality.minimum_distance)` --
    # comparing against the *minimum* distance and without a None
    # guard.  Compare against maximum_distance, mirroring the
    # minimum_distance update above.
    origin.quality.maximum_distance = max(
        d for d in (arrival.distance, origin.quality.maximum_distance)
        if d is not None)
    origin.quality.associated_phase_count += 1
    return pick, arrival
def _parse_arrivals(self, event, origin, origin_res_id):
    """
    Parse the arrival lines that follow an origin block.

    For every usable arrival line this creates a Pick and an Arrival;
    when an amplitude value is present it also creates an Amplitude
    plus up to two StationMagnitudes (one per magnitude column), each
    linked to the matching network magnitude when one exists.
    Objects are appended to ``event`` / ``origin`` in place.
    """
    # Skip header of arrivals
    next(self.lines)

    # Stop the loop after 2 empty lines (according to the standard).
    previous_line_empty = False

    for line in self.lines:
        line_empty = not line or line.isspace()

        if not self.event_point_separator:
            # Event are separated by two empty lines
            if line_empty and previous_line_empty:
                break
        else:
            # Event are separated by '.'
            if line.startswith('.'):
                break

        previous_line_empty = line_empty

        if line_empty:
            # Skip empty lines when the loop should be stopped by
            # point
            continue

        magnitude_types = []
        magnitude_values = []

        # Column spans for each field of the arrival line (configured
        # per-format on the parser instance).
        fields = self.fields['arrival']

        station = line[fields['sta']].strip()
        distance = line[fields['dist']].strip()
        event_azimuth = line[fields['ev_az']].strip()
        evaluation_mode = line[fields['picktype']].strip()
        direction = line[fields['direction']].strip()
        onset = line[fields['detchar']].strip()
        phase = line[fields['phase']].strip()
        # Date separators are '/' in the file; UTCDateTime wants '-'.
        time = line[fields['time']].strip().replace('/', '-')
        time_residual = line[fields['t_res']].strip()
        arrival_azimuth = line[fields['azim']].strip()
        azimuth_residual = line[fields['az_res']].strip()
        slowness = line[fields['slow']].strip()
        slowness_residual = line[fields['s_res']].strip()
        time_defining_flag = line[fields['t_def']].strip()
        azimuth_defining_flag = line[fields['a_def']].strip()
        slowness_defining_flag = line[fields['s_def']].strip()
        snr = line[fields['snr']].strip()
        amplitude_value = line[fields['amp']].strip()
        period = line[fields['per']].strip()
        magnitude_types.append(line[fields['mag_type_1']].strip())
        magnitude_values.append(line[fields['mag_1']].strip())
        magnitude_types.append(line[fields['mag_type_2']].strip())
        magnitude_values.append(line[fields['mag_2']].strip())
        line_id = line[fields['id']].strip()

        # Don't take pick and arrival with wrong time residual
        if '*' in time_residual:
            continue

        try:
            pick = Pick()
            pick.creation_info = self._get_creation_info()
            pick.waveform_id = WaveformStreamID()
            pick.waveform_id.station_code = station
            pick.time = UTCDateTime(time)

            # Fall back to configured defaults when no inventory
            # channel can be matched for this station/time.
            network_code = self.default_network_code
            location_code = self.default_location_code
            channel_code = self.default_channel_code

            try:
                network_code, channel = self._get_channel(
                    station, pick.time)
                if channel:
                    channel_code = channel.code
                    location_code = channel.location_code
            except TypeError:
                pass

            pick.waveform_id.network_code = network_code
            pick.waveform_id.channel_code = channel_code
            if location_code:
                pick.waveform_id.location_code = location_code

            # Optional enumerated fields: silently skip values not in
            # the mapping tables.
            try:
                ev_mode = EVALUATION_MODES[evaluation_mode]
                pick.evaluation_mode = ev_mode
            except KeyError:
                pass
            try:
                pick.polarity = PICK_POLARITIES[direction]
            except KeyError:
                pass
            try:
                pick.onset = PICK_ONSETS[onset]
            except KeyError:
                pass
            pick.phase_hint = phase
            # Optional numeric fields: blank/malformed values are left
            # unset.
            try:
                pick.backazimuth = float(arrival_azimuth)
            except ValueError:
                pass
            try:
                pick.horizontal_slowness = float(slowness)
            except ValueError:
                pass

            public_id = "pick/%s" % line_id
            pick.resource_id = self._get_res_id(public_id)
            event.picks.append(pick)
        except (TypeError, ValueError, AttributeError):
            # Can't parse pick, skip arrival and amplitude parsing
            continue

        arrival = Arrival()
        arrival.creation_info = self._get_creation_info()

        try:
            arrival.pick_id = pick.resource_id.id
        except AttributeError:
            pass
        arrival.phase = phase
        try:
            arrival.azimuth = float(event_azimuth)
        except ValueError:
            pass
        try:
            arrival.distance = float(distance)
        except ValueError:
            pass
        try:
            arrival.time_residual = float(time_residual)
        except ValueError:
            pass
        try:
            arrival.backazimuth_residual = float(azimuth_residual)
        except ValueError:
            pass
        try:
            arrival.horizontal_slowness_residual = float(slowness_residual)
        except ValueError:
            pass

        # Defining flags translate to unit weights in QuakeML.
        if time_defining_flag == 'T':
            arrival.time_weight = 1
        if azimuth_defining_flag == 'A':
            arrival.backazimuth_weight = 1
        if slowness_defining_flag == 'S':
            arrival.horizontal_slowness_weight = 1

        public_id = "arrival/%s" % line_id
        arrival.resource_id = self._get_res_id(public_id,
                                               parent_res_id=origin_res_id)
        origin.arrivals.append(arrival)

        # Amplitude (and dependent station magnitudes) are only created
        # when the amplitude column parses as a float; the enclosing
        # try aborts everything below on that first ValueError.
        try:
            amplitude = Amplitude()
            amplitude.creation_info = self._get_creation_info()
            amplitude.generic_amplitude = float(amplitude_value)
            try:
                amplitude.pick_id = pick.resource_id
                amplitude.waveform_id = pick.waveform_id
            except AttributeError:
                pass
            try:
                amplitude.period = float(period)
            except ValueError:
                pass
            try:
                amplitude.snr = float(snr)
            except ValueError:
                pass

            # Last non-blank magnitude type wins as the hint.
            for i in [0, 1]:
                if magnitude_types[i] and not magnitude_types[i].isspace():
                    amplitude.magnitude_hint = magnitude_types[i]

            public_id = "amplitude/%s" % line_id
            amplitude.resource_id = self._get_res_id(public_id)
            event.amplitudes.append(amplitude)

            # One station magnitude per magnitude column on the line.
            for i in [0, 1]:
                sta_mag = StationMagnitude()
                sta_mag.creation_info = self._get_creation_info()
                sta_mag.origin_id = origin_res_id
                sta_mag.amplitude_id = amplitude.resource_id
                sta_mag.station_magnitude_type = magnitude_types[i]
                sta_mag.mag = magnitude_values[i]
                sta_mag.waveform_id = pick.waveform_id
                public_id = "magnitude/station/%s/%s" % (line_id, i)
                sta_mag.resource_id = self._get_res_id(public_id)
                event.station_magnitudes.append(sta_mag)

                # Associate station mag with network mag of same type
                mag = self._find_magnitude_by_type(event, origin_res_id,
                                                   magnitude_types[i])
                if mag:
                    contrib = StationMagnitudeContribution()
                    contrib.station_magnitude_id = sta_mag.resource_id
                    contrib.weight = 1.0
                    mag.station_magnitude_contributions.append(contrib)
        except ValueError:
            pass
def full_test_event():
    """
    Generate a basic, fully-populated test event.

    Returns an Event with one origin, three network magnitudes, five
    picks, three amplitudes and three arrivals.
    """
    day = UTCDateTime("2012-03-26")
    test_event = Event()
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.event_descriptions.append(EventDescription(text='LE'))
    origin = Origin(time=day + 1.2, latitude=45.0, longitude=25.0,
                    depth=15000,
                    quality=OriginQuality(standard_error=0.01))
    test_event.origins.append(origin)
    # Three network magnitudes, all tied to the same origin.
    for value, mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        test_event.magnitudes.append(
            Magnitude(mag=value, magnitude_type=mag_type,
                      creation_info=CreationInfo('TES'),
                      origin_id=origin.resource_id))
    # Streams referenced by the picks below.
    foz = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                           network_code='NZ')
    wtsz = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                            network_code=' ')
    test_event.picks = [
        # Pick to associate with amplitude - 0
        Pick(waveform_id=foz, phase_hint='IAML', polarity='undecidable',
             time=day + 1.68, evaluation_mode="manual"),
        Pick(waveform_id=foz, onset='impulsive', phase_hint='PN',
             polarity='positive', time=day + 1.68,
             evaluation_mode="manual"),
        Pick(waveform_id=foz, phase_hint='IAML', polarity='undecidable',
             time=day + 1.68, evaluation_mode="manual"),
        Pick(waveform_id=wtsz, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=day + 1.72,
             evaluation_mode="manual"),
        Pick(waveform_id=wtsz, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=day + 1.62,
             evaluation_mode="automatic")]
    # A generic local-magnitude amplitude, a coda-duration amplitude,
    # and an amplitude with no magnitude hint.
    test_event.amplitudes = [
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'),
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id,
                  type='END', category='duration', unit='s',
                  magnitude_hint='Mc', snr=2.3),
        Amplitude(generic_amplitude=5.0, period=0.6,
                  pick_id=test_event.picks[2].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  category='point', type='AML')]
    # One zero-weight arrival and two fully-populated ones.
    origin.arrivals = [
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id)]
    for idx in (3, 4):
        origin.arrivals.append(
            Arrival(time_weight=2, phase=test_event.picks[idx].phase_hint,
                    pick_id=test_event.picks[idx].resource_id,
                    backazimuth_residual=5, time_residual=0.2,
                    distance=15, azimuth=25))
    return test_event
def full_test_event():
    """
    Generate a basic, fully-populated test event.

    Includes two origins (the second high-precision one is used by the
    moment tensor), network magnitudes, picks, amplitudes, arrivals,
    origin uncertainty and two focal mechanisms.
    """
    day = UTCDateTime("2012-03-26")
    test_event = Event()
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.event_descriptions.append(EventDescription(text='LE'))
    origin = Origin(time=day + 1.2, latitude=45.0, longitude=25.0,
                    depth=15000)
    test_event.origins.append(origin)
    # Three network magnitudes, all tied to the first origin.
    for value, mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        test_event.magnitudes.append(
            Magnitude(mag=value, magnitude_type=mag_type,
                      creation_info=CreationInfo('TES'),
                      origin_id=origin.resource_id))
    # Streams referenced by the picks below.
    foz = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                           network_code='NZ')
    wtsz = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                            network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=foz, phase_hint='IAML', polarity='undecidable',
             time=day + 1.68, evaluation_mode="manual"))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=foz, onset='impulsive', phase_hint='PN',
             polarity='positive', time=day + 1.68,
             evaluation_mode="manual"))
    # Two unassociated picks
    test_event.picks.append(
        Pick(waveform_id=wtsz, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=day + 1.72,
             evaluation_mode="manual"))
    test_event.picks.append(
        Pick(waveform_id=wtsz, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=day + 1.62,
             evaluation_mode="automatic"))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='ML', category='point', type='AML'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id,
                  type='END', category='duration', unit='s',
                  magnitude_hint='Mc', snr=2.3))
    # One zero-weight arrival and two fully-populated ones.
    origin.arrivals.append(
        Arrival(time_weight=0, phase=test_event.picks[1].phase_hint,
                pick_id=test_event.picks[1].resource_id))
    for idx in (2, 3):
        origin.arrivals.append(
            Arrival(time_weight=2, phase=test_event.picks[idx].phase_hint,
                    pick_id=test_event.picks[idx].resource_id,
                    backazimuth_residual=5, time_residual=0.2,
                    distance=15, azimuth=25))
    # Add in error info (line E)
    origin.quality = OriginQuality(standard_error=0.01, azimuthal_gap=36)
    # Origin uncertainty in Seisan is output as long-lat-depth, quakeML
    # has semi-major and semi-minor
    origin.origin_uncertainty = OriginUncertainty(
        confidence_ellipsoid=ConfidenceEllipsoid(
            semi_major_axis_length=3000, semi_minor_axis_length=1000,
            semi_intermediate_axis_length=2000, major_axis_plunge=20,
            major_axis_azimuth=100, major_axis_rotation=4))
    origin.time_errors = QuantityError(uncertainty=0.5)
    # Add in fault-plane solution info (line F) - Note have to check
    # program used to determine which fields are filled....
    test_event.focal_mechanisms.append(
        FocalMechanism(
            nodal_planes=NodalPlanes(nodal_plane_1=NodalPlane(
                strike=180, dip=20, rake=30,
                strike_errors=QuantityError(10),
                dip_errors=QuantityError(10),
                rake_errors=QuantityError(20))),
            method_id=ResourceIdentifier(
                "smi:nc.anss.org/focalMechanism/FPFIT"),
            creation_info=CreationInfo(agency_id="NC"), misfit=0.5,
            station_distribution_ratio=0.8))
    # Need to test high-precision origin and that it is preferred
    # origin.  Moment tensor includes another origin
    mt_origin = Origin(time=day + 1.2, latitude=45.1, longitude=25.2,
                       depth=14500)
    test_event.origins.append(mt_origin)
    mw_mag = Magnitude(mag=0.1, magnitude_type='MW',
                       creation_info=CreationInfo('TES'),
                       origin_id=mt_origin.resource_id)
    test_event.magnitudes.append(mw_mag)
    # Moment tensors go with focal-mechanisms
    test_event.focal_mechanisms.append(
        FocalMechanism(moment_tensor=MomentTensor(
            derived_origin_id=mt_origin.resource_id,
            moment_magnitude_id=mw_mag.resource_id, scalar_moment=100,
            tensor=Tensor(m_rr=100, m_tt=100, m_pp=10, m_rt=1, m_rp=20,
                          m_tp=15),
            method_id=ResourceIdentifier(
                'smi:nc.anss.org/momentTensor/BLAH'))))
    return test_event
def _parse_phase(self, line):
    """
    Parse one fixed-width phase line.

    Since we can not identify which origin a phase line corresponds
    to, we can not use any of the included information that would go
    in the Arrival object, as that would have to be attached to the
    appropriate origin.  For now, just append all of these items as
    comments to the pick.

    :param line: fixed-width phase line to parse
    :returns: ``(pick, amplitude, station_magnitude)``; amplitude and
        station magnitude may be ``None``, and all three are ``None``
        when the pick time can not be determined.
    """
    comments = []
    # 1-5     a5      station code
    station_code = line[0:5].strip()
    # 7-12    f6.2    station-to-event distance (degrees)
    comments.append('station-to-event distance (degrees): "{}"'.format(
        line[6:12]))
    # 14-18   f5.1    event-to-station azimuth (degrees)
    comments.append('event-to-station azimuth (degrees): "{}"'.format(
        line[13:18]))
    # 20-27   a8      phase code
    phase_hint = line[19:27].strip()
    # 29-40   i2,a1,i2,a1,f6.3        arrival time (hh:mm:ss.sss)
    time = self._get_pick_time(line[28:40])
    if time is None:
        msg = ('Could not determine absolute time of pick. This phase '
               'line will be ignored:\n{}').format(line)
        warnings.warn(msg)
        return None, None, None
    # 42-46   f5.1    time residual (seconds)
    comments.append('time residual (seconds): "{}"'.format(line[41:46]))
    # 48-52   f5.1    observed azimuth (degrees)
    comments.append('observed azimuth (degrees): "{}"'.format(line[47:52]))
    # 54-58   f5.1    azimuth residual (degrees)
    comments.append('azimuth residual (degrees): "{}"'.format(line[53:58]))
    # 60-65   f5.1    observed slowness (seconds/degree)
    comments.append('observed slowness (seconds/degree): "{}"'.format(
        line[59:65]))
    # 67-72   f5.1    slowness residual (seconds/degree)
    comments.append('slowness residual (seconds/degree): "{}"'.format(
        line[66:71]))
    # 74      a1      time defining flag (T or _)
    comments.append('time defining flag (T or _): "{}"'.format(line[73]))
    # 75      a1      azimuth defining flag (A or _)
    comments.append('azimuth defining flag (A or _): "{}"'.format(
        line[74]))
    # 76      a1      slowness defining flag (S or _)
    comments.append('slowness defining flag (S or _): "{}"'.format(
        line[75]))
    # 78-82   f5.1    signal-to-noise ratio
    comments.append('signal-to-noise ratio: "{}"'.format(line[77:82]))
    # 84-92   f9.1    amplitude (nanometers)
    amplitude = float_or_none(line[83:92])
    # 94-98   f5.2    period (seconds)
    period = float_or_none(line[93:98])
    # 100     a1      type of pick (a = automatic, m = manual)
    evaluation_mode = line[99]
    # 101     a1      direction of short period motion
    #                 (c = compression, d = dilatation, _= null)
    polarity = POLARITY[line[100].strip().lower()]
    # 102     a1      onset quality (i = impulsive, e = emergent,
    #                 q = questionable, _ = null)
    onset = ONSET[line[101].strip().lower()]
    # 104-108 a5      magnitude type (mb, Ms, ML, mbmle, msmle)
    magnitude_type = line[103:108].strip()
    # 109     a1      min max indicator (<, >, or blank)
    min_max_indicator = line[108]
    # 110-113 f4.1    magnitude value
    mag = float_or_none(line[109:113])
    # 115-122 a8      arrival identification
    phase_id = line[114:122].strip()
    # process items
    waveform_id = WaveformStreamID(station_code=station_code)
    evaluation_mode = PICK_EVALUATION_MODE[evaluation_mode.strip().lower()]
    comments = [self._make_comment(', '.join(comments))]
    # BUG FIX: these two branches were swapped in the original, so a
    # line that *has* an arrival id got a hashed resource id while a
    # line without one interpolated the empty string.  Use the arrival
    # id when present; fall back to a hashed id otherwise.
    if phase_id:
        resource_id = self._construct_id(['pick', phase_id])
    else:
        resource_id = self._construct_id(['pick'], add_hash=True)
    if mag:
        comment = ('min max indicator (<, >, or blank): ' +
                   min_max_indicator)
        station_magnitude = StationMagnitude(
            mag=mag, magnitude_type=magnitude_type,
            resource_id=self._construct_id(['station_magnitude'],
                                           add_hash=True),
            comments=[self._make_comment(comment)])
        # event init always sets an empty ResourceIdentifier, even when
        # specifying None, which is strange
        for key in ['origin_id', 'mag_errors']:
            setattr(station_magnitude, key, None)
    else:
        station_magnitude = None
    # assemble
    pick = Pick(phase_hint=phase_hint, time=time,
                waveform_id=waveform_id,
                evaluation_mode=evaluation_mode, comments=comments,
                polarity=polarity, onset=onset, resource_id=resource_id)
    # event init always sets an empty QuantityError, even when
    # specifying None, which is strange
    for key in ('time_errors', 'horizontal_slowness_errors',
                'backazimuth_errors'):
        setattr(pick, key, None)
    if amplitude:
        amplitude /= 1e9  # convert from nanometers to meters
        amplitude = Amplitude(unit='m', generic_amplitude=amplitude,
                              period=period)
    return pick, amplitude, station_magnitude
def full_test_event():
    """
    Generate a basic, fully-populated test event.
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    from obspy import UTCDateTime
    day = UTCDateTime("2012-03-26")
    test_event = Event()
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.event_descriptions.append(EventDescription(text='LE'))
    origin = Origin(time=day + 1, latitude=45.0, longitude=25.0,
                    depth=15000)
    test_event.origins.append(origin)
    origin.time_errors['Time_Residual_RMS'] = 0.01
    # Three network magnitudes, all tied to the same origin.
    for value, mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        test_event.magnitudes.append(
            Magnitude(mag=value, magnitude_type=mag_type,
                      creation_info=CreationInfo('TES'),
                      origin_id=origin.resource_id))
    # Streams referenced by the picks below.
    foz = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                           network_code='NZ')
    wtsz = WaveformStreamID(station_code='WTSZ', channel_code='BH1',
                            network_code=' ')
    # Pick to associate with amplitude
    test_event.picks.append(
        Pick(waveform_id=foz, phase_hint='IAML', polarity='undecidable',
             time=day + 1.68))
    # Need a second pick for coda
    test_event.picks.append(
        Pick(waveform_id=foz, onset='impulsive', phase_hint='PN',
             polarity='positive', time=day + 1.68))
    # Two unassociated picks
    test_event.picks.append(
        Pick(waveform_id=wtsz, onset='impulsive', phase_hint='SG',
             polarity='undecidable', time=day + 1.72))
    test_event.picks.append(
        Pick(waveform_id=wtsz, onset='impulsive', phase_hint='PN',
             polarity='undecidable', time=day + 1.62))
    # Test a generic local magnitude amplitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m',
                  magnitude_hint='Ml'))
    # Test a coda magnitude pick
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=10,
                  pick_id=test_event.picks[1].resource_id,
                  waveform_id=test_event.picks[1].waveform_id,
                  type='END', category='duration', unit='s',
                  magnitude_hint='Mc', snr=2.3))
    # Two fully-populated arrivals for the unassociated picks.
    for idx in (2, 3):
        origin.arrivals.append(
            Arrival(time_weight=2, phase=test_event.picks[idx].phase_hint,
                    pick_id=test_event.picks[idx].resource_id,
                    backazimuth_residual=5, time_residual=0.2,
                    distance=15, azimuth=25))
    return test_event
def _read_single_event(event_file, locate_dir, units, local_mag_ph):
    """
    Parse an event file from QuakeMigrate into an obspy Event object.

    Parameters
    ----------
    event_file : `pathlib.Path` object
        Path to .event file to read.
    locate_dir : `pathlib.Path` object
        Path to locate directory (contains "events", "picks" etc.
        directories).
    units : {"km", "m"}
        Grid projection coordinates for QM LUT (determines units of
        depths and uncertainties in the .event files).
    local_mag_ph : {"S", "P"}
        Amplitude measurement used to calculate local magnitudes.

    Returns
    -------
    event : `obspy.Event` object
        Event object populated with all available information output by
        :class:`~quakemigrate.signal.scan.locate()`, including event
        locations and uncertainties, picks, and amplitudes and
        magnitudes if available.

    """
    # Parse information from event file; files hold a single row.
    event_info = pd.read_csv(event_file).iloc[0]
    event_uid = str(event_info["EventID"])

    # Set distance conversion factor (from units of QM LUT projection
    # units) so all distances/depths end up in metres.
    if units == "km":
        factor = 1e3
    elif units == "m":
        factor = 1
    else:
        raise AttributeError(f"units must be 'km' or 'm'; not {units}")

    # Create event object to store origin and pick information
    event = Event()
    event.extra = AttribDict()
    event.resource_id = str(event_info["EventID"])
    event.creation_info = CreationInfo(author="QuakeMigrate",
                                       version=quakemigrate.__version__)

    # Add COA info to extra (custom namespaced attributes)
    event.extra.coa = {"value": event_info["COA"], "namespace": ns}
    event.extra.coa_norm = {"value": event_info["COA_NORM"],
                            "namespace": ns}
    event.extra.trig_coa = {"value": event_info["TRIG_COA"],
                            "namespace": ns}
    event.extra.dec_coa = {"value": event_info["DEC_COA"],
                           "namespace": ns}
    event.extra.dec_coa_norm = {"value": event_info["DEC_COA_NORM"],
                                "namespace": ns}

    # Determine location of cut waveform data - add to event object as a
    # custom extra attribute.
    # NOTE(review): waveform file paths are stored with a ".m" suffix —
    # confirm this matches the files actually written by locate().
    mseed = locate_dir / "raw_cut_waveforms" / event_uid
    event.extra.cut_waveforms_file = {
        "value": str(mseed.with_suffix(".m").resolve()),
        "namespace": ns
    }
    if (locate_dir / "real_cut_waveforms").exists():
        mseed = locate_dir / "real_cut_waveforms" / event_uid
        event.extra.real_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }
    if (locate_dir / "wa_cut_waveforms").exists():
        mseed = locate_dir / "wa_cut_waveforms" / event_uid
        event.extra.wa_cut_waveforms_file = {
            "value": str(mseed.with_suffix(".m").resolve()),
            "namespace": ns
        }

    # Create origin with spline location and set to preferred event
    # origin.
    origin = Origin()
    origin.method_id = "spline"
    origin.longitude = event_info["X"]
    origin.latitude = event_info["Y"]
    origin.depth = event_info["Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins = [origin]
    event.preferred_origin_id = origin.resource_id

    # Create origin with gaussian location and associate with event
    origin = Origin()
    origin.method_id = "gaussian"
    origin.longitude = event_info["GAU_X"]
    origin.latitude = event_info["GAU_Y"]
    origin.depth = event_info["GAU_Z"] * factor
    origin.time = UTCDateTime(event_info["DT"])
    event.origins.append(origin)

    ouc = OriginUncertainty()
    ce = ConfidenceEllipsoid()
    # NOTE(review): the semi-major axis is taken from COV_ErrY and the
    # intermediate axis from COV_ErrX — confirm this X/Y mapping is
    # intentional.
    ce.semi_major_axis_length = event_info["COV_ErrY"] * factor
    ce.semi_intermediate_axis_length = event_info["COV_ErrX"] * factor
    ce.semi_minor_axis_length = event_info["COV_ErrZ"] * factor
    ce.major_axis_plunge = 0
    ce.major_axis_azimuth = 0
    ce.major_axis_rotation = 0
    ouc.confidence_ellipsoid = ce
    ouc.preferred_description = "confidence ellipsoid"

    # Set uncertainties for both as the gaussian uncertainties
    for origin in event.origins:
        origin.longitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrX"] * factor / 1e3)
        origin.latitude_errors.uncertainty = kilometer2degrees(
            event_info["GAU_ErrY"] * factor / 1e3)
        origin.depth_errors.uncertainty = event_info["GAU_ErrZ"] * factor
        origin.origin_uncertainty = ouc

    # Add OriginQuality info to each origin?
    for origin in event.origins:
        origin.origin_type = "hypocenter"
        origin.evaluation_mode = "automatic"

    # --- Handle picks file ---
    pick_file = locate_dir / "picks" / event_uid
    if pick_file.with_suffix(".picks").is_file():
        picks = pd.read_csv(pick_file.with_suffix(".picks"))
    else:
        # No picks file: event cannot be fully reconstructed.
        return None

    for _, pickline in picks.iterrows():
        station = str(pickline["Station"])
        phase = str(pickline["Phase"])
        wid = WaveformStreamID(network_code="", station_code=station)

        # Two pick variants per line: the modelled (predicted) time and,
        # when available, the automatic pick.
        for method in ["modelled", "autopick"]:
            pick = Pick()
            pick.extra = AttribDict()
            pick.waveform_id = wid
            pick.method_id = method
            pick.phase_hint = phase
            # "-1" marks a missing automatic pick.
            if method == "autopick" and str(pickline["PickTime"]) != "-1":
                pick.time = UTCDateTime(pickline["PickTime"])
                pick.time_errors.uncertainty = float(pickline["PickError"])
                pick.extra.snr = {
                    "value": float(pickline["SNR"]),
                    "namespace": ns
                }
            elif method == "modelled":
                pick.time = UTCDateTime(pickline["ModelledTime"])
            else:
                continue
            event.picks.append(pick)

    # --- Handle amplitudes file ---
    amps_file = locate_dir / "amplitudes" / event_uid
    if amps_file.with_suffix(".amps").is_file():
        amps = pd.read_csv(amps_file.with_suffix(".amps"))

        # i counts the station magnitudes contributing to the network
        # magnitude below.
        i = 0
        for _, ampsline in amps.iterrows():
            wid = WaveformStreamID(seed_string=ampsline["id"])
            noise_amp = ampsline["Noise_amp"] / 1000  # mm to m
            for phase in ["P_amp", "S_amp"]:
                amp = Amplitude()
                if pd.isna(ampsline[phase]):
                    continue
                amp.generic_amplitude = ampsline[phase] / 1000  # mm to m
                amp.generic_amplitude_errors.uncertainty = noise_amp
                amp.unit = "m"
                amp.type = "AML"
                amp.method_id = phase
                amp.period = 1 / ampsline[f"{phase[0]}_freq"]
                amp.time_window = TimeWindow(
                    reference=UTCDateTime(ampsline[f"{phase[0]}_time"]))
                # amp.pick_id = ?
                amp.waveform_id = wid
                # amp.filter_id = ?
                amp.magnitude_hint = "ML"
                amp.evaluation_mode = "automatic"
                amp.extra = AttribDict()
                # Filter-gain columns are optional in older output.
                try:
                    amp.extra.filter_gain = {
                        "value": ampsline[f"{phase[0]}_filter_gain"],
                        "namespace": ns
                    }
                    amp.extra.avg_amp = {
                        "value": ampsline[f"{phase[0]}_avg_amp"] / 1000,  # m
                        "namespace": ns
                    }
                except KeyError:
                    pass

                # Only the configured phase contributes a station ML.
                if phase[0] == local_mag_ph and \
                        not pd.isna(ampsline["ML"]):
                    i += 1
                    stat_mag = StationMagnitude()
                    stat_mag.extra = AttribDict()
                    # stat_mag.origin_id = ? local_mag_loc
                    stat_mag.mag = ampsline["ML"]
                    stat_mag.mag_errors.uncertainty = ampsline["ML_Err"]
                    stat_mag.station_magnitude_type = "ML"
                    stat_mag.amplitude_id = amp.resource_id
                    stat_mag.extra.picked = {
                        "value": ampsline["is_picked"],
                        "namespace": ns
                    }
                    stat_mag.extra.epi_dist = {
                        "value": ampsline["epi_dist"],
                        "namespace": ns
                    }
                    stat_mag.extra.z_dist = {
                        "value": ampsline["z_dist"],
                        "namespace": ns
                    }
                    event.station_magnitudes.append(stat_mag)

                event.amplitudes.append(amp)

        # Network local magnitude summarising the station magnitudes.
        mag = Magnitude()
        mag.extra = AttribDict()
        mag.mag = event_info["ML"]
        mag.mag_errors.uncertainty = event_info["ML_Err"]
        mag.magnitude_type = "ML"
        # mag.origin_id = ?
        mag.station_count = i
        mag.evaluation_mode = "automatic"
        mag.extra.r2 = {"value": event_info["ML_r2"], "namespace": ns}
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id

    return event
def test_read_write():
    """
    Test the read and write capabilities of Sfile_util.

    Builds a synthetic :class:`obspy.core.event.Event` with an origin, three
    magnitudes (ML, Mc, Ms), one pick, one amplitude and one arrival, then:

    1. round-trips it through QuakeML (``write`` + ``read_events``) and
       asserts field-by-field equality;
    2. round-trips it through an s-file (``eventtoSfile`` + ``readpicks``)
       and asserts the fields the s-file format can represent;
    3. round-trips it through the PICK/EVENTINFO wrappers
       (``eventtopick`` + ``picktoevent``) with the same assertions.

    Writes temporary files ("Test_catalog.xml" and the s-file) into the
    current working directory and removes them again on the success path.

    :returns: True
    """
    import os
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Catalog, Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    import obspy
    # read_events was renamed from readEvents at obspy 1.0.0 - support both.
    if int(obspy.__version__.split('.')[0]) >= 1:
        from obspy.core.event import read_events
    else:
        from obspy.core.event import readEvents as read_events
    from obspy import UTCDateTime
    # Set-up a test event
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15.0
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
    # Three magnitudes of different types, all tied to the one origin.
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id
    # Define the test pick
    _waveform_id = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                    network_code='NZ')
    test_event.picks.append(
        Pick(waveform_id=_waveform_id, onset='impulsive', phase_hint='PN',
             polarity='positive', time=UTCDateTime("2012-03-26") + 1.68,
             horizontal_slowness=12, backazimuth=20))
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id, unit='m'))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2, phase=test_event.picks[0].phase_hint,
                pick_id=test_event.picks[0].resource_id,
                backazimuth_residual=5, time_residual=0.2, distance=15,
                azimuth=25))
    # Add the event to a catalogue which can be used for QuakeML testing
    test_cat = Catalog()
    test_cat += test_event
    # Write the catalog
    test_cat.write("Test_catalog.xml", format='QUAKEML')
    # Read and check
    read_cat = read_events("Test_catalog.xml")
    os.remove("Test_catalog.xml")
    assert read_cat[0].resource_id == test_cat[0].resource_id
    assert read_cat[0].picks == test_cat[0].picks
    assert read_cat[0].origins[0].resource_id ==\
        test_cat[0].origins[0].resource_id
    assert read_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that time_residuel_RMS is not a quakeML format
    assert read_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert read_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert read_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert read_cat[0].magnitudes == test_cat[0].magnitudes
    assert read_cat[0].event_descriptions == test_cat[0].event_descriptions
    assert read_cat[0].amplitudes[0].resource_id ==\
        test_cat[0].amplitudes[0].resource_id
    assert read_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert read_cat[0].amplitudes[0].unit == test_cat[0].amplitudes[0].unit
    assert read_cat[0].amplitudes[0].generic_amplitude ==\
        test_cat[0].amplitudes[0].generic_amplitude
    assert read_cat[0].amplitudes[0].pick_id ==\
        test_cat[0].amplitudes[0].pick_id
    assert read_cat[0].amplitudes[0].waveform_id ==\
        test_cat[0].amplitudes[0].waveform_id
    # Check the read-write s-file functionality
    sfile = eventtoSfile(test_cat[0], userID='TEST', evtype='L', outdir='.',
                         wavefiles='test', explosion=True, overwrite=True)
    # Drop the QuakeML catalog and re-populate read_cat from the s-file.
    del read_cat
    assert readwavename(sfile) == ['test']
    read_cat = Catalog()
    read_cat += readpicks(sfile)
    os.remove(sfile)
    assert read_cat[0].picks[0].time == test_cat[0].picks[0].time
    assert read_cat[0].picks[0].backazimuth == test_cat[0].picks[0].backazimuth
    assert read_cat[0].picks[0].onset == test_cat[0].picks[0].onset
    assert read_cat[0].picks[0].phase_hint == test_cat[0].picks[0].phase_hint
    assert read_cat[0].picks[0].polarity == test_cat[0].picks[0].polarity
    assert read_cat[0].picks[0].waveform_id.station_code ==\
        test_cat[0].picks[0].waveform_id.station_code
    # Only the orientation (last char) of the channel code survives the
    # two-character channel field of the s-file format.
    assert read_cat[0].picks[0].waveform_id.channel_code[-1] ==\
        test_cat[0].picks[0].waveform_id.channel_code[-1]
    # assert read_cat[0].origins[0].resource_id ==\
    #     test_cat[0].origins[0].resource_id
    assert read_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that time_residuel_RMS is not a quakeML format
    assert read_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert read_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert read_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert read_cat[0].magnitudes[0].mag == test_cat[0].magnitudes[0].mag
    assert read_cat[0].magnitudes[1].mag == test_cat[0].magnitudes[1].mag
    assert read_cat[0].magnitudes[2].mag == test_cat[0].magnitudes[2].mag
    assert read_cat[0].magnitudes[0].creation_info ==\
        test_cat[0].magnitudes[0].creation_info
    assert read_cat[0].magnitudes[1].creation_info ==\
        test_cat[0].magnitudes[1].creation_info
    assert read_cat[0].magnitudes[2].creation_info ==\
        test_cat[0].magnitudes[2].creation_info
    assert read_cat[0].magnitudes[0].magnitude_type ==\
        test_cat[0].magnitudes[0].magnitude_type
    assert read_cat[0].magnitudes[1].magnitude_type ==\
        test_cat[0].magnitudes[1].magnitude_type
    assert read_cat[0].magnitudes[2].magnitude_type ==\
        test_cat[0].magnitudes[2].magnitude_type
    assert read_cat[0].event_descriptions == test_cat[0].event_descriptions
    # assert read_cat[0].amplitudes[0].resource_id ==\
    #     test_cat[0].amplitudes[0].resource_id
    assert read_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    # NOTE(review): neither amplitude has snr set, so this compares
    # None == None - confirm this is the intended check.
    assert read_cat[0].amplitudes[0].snr == test_cat[0].amplitudes[0].snr
    del read_cat
    # assert read_cat[0].amplitudes[0].pick_id ==\
    #     test_cat[0].amplitudes[0].pick_id
    # assert read_cat[0].amplitudes[0].waveform_id ==\
    #     test_cat[0].amplitudes[0].waveform_id
    # Test the wrappers for PICK and EVENTINFO classes
    picks, evinfo = eventtopick(test_cat)
    # Test the conversion back
    conv_cat = Catalog()
    conv_cat.append(picktoevent(evinfo, picks))
    assert conv_cat[0].picks[0].time == test_cat[0].picks[0].time
    assert conv_cat[0].picks[0].backazimuth == test_cat[0].picks[0].backazimuth
    assert conv_cat[0].picks[0].onset == test_cat[0].picks[0].onset
    assert conv_cat[0].picks[0].phase_hint == test_cat[0].picks[0].phase_hint
    assert conv_cat[0].picks[0].polarity == test_cat[0].picks[0].polarity
    assert conv_cat[0].picks[0].waveform_id.station_code ==\
        test_cat[0].picks[0].waveform_id.station_code
    assert conv_cat[0].picks[0].waveform_id.channel_code[-1] ==\
        test_cat[0].picks[0].waveform_id.channel_code[-1]
    # assert read_cat[0].origins[0].resource_id ==\
    #     test_cat[0].origins[0].resource_id
    assert conv_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that time_residuel_RMS is not a quakeML format
    assert conv_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert conv_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert conv_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert conv_cat[0].magnitudes[0].mag == test_cat[0].magnitudes[0].mag
    assert conv_cat[0].magnitudes[1].mag == test_cat[0].magnitudes[1].mag
    assert conv_cat[0].magnitudes[2].mag == test_cat[0].magnitudes[2].mag
    assert conv_cat[0].magnitudes[0].creation_info ==\
        test_cat[0].magnitudes[0].creation_info
    assert conv_cat[0].magnitudes[1].creation_info ==\
        test_cat[0].magnitudes[1].creation_info
    assert conv_cat[0].magnitudes[2].creation_info ==\
        test_cat[0].magnitudes[2].creation_info
    assert conv_cat[0].magnitudes[0].magnitude_type ==\
        test_cat[0].magnitudes[0].magnitude_type
    assert conv_cat[0].magnitudes[1].magnitude_type ==\
        test_cat[0].magnitudes[1].magnitude_type
    assert conv_cat[0].magnitudes[2].magnitude_type ==\
        test_cat[0].magnitudes[2].magnitude_type
    assert conv_cat[0].event_descriptions == test_cat[0].event_descriptions
    # assert read_cat[0].amplitudes[0].resource_id ==\
    #     test_cat[0].amplitudes[0].resource_id
    assert conv_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert conv_cat[0].amplitudes[0].snr == test_cat[0].amplitudes[0].snr
    return True
def _read_picks(f, new_event):
    """
    Internal pick reader. Use read_nordic instead.

    Parses the fixed-column phase lines of a Nordic s-file (type-4/blank
    lines after the type-7 header line) into obspy Pick, Amplitude and
    Arrival objects attached to *new_event*.

    :type f: file
    :param f: File open in read mode
    :type new_event: :class:`~obspy.core.event.event.Event`
    :param new_event: event to associate picks with.

    :returns: :class:`~obspy.core.event.event.Event`
    """
    f.seek(0)
    evtime = new_event.origins[0].time
    pickline = []
    # Set a default, ignored later unless overwritten
    snr = None
    # Scan for the type-7 header line (column 80 == '7'); phase lines follow.
    # NOTE(review): if no such line exists, `header` below is unbound and a
    # NameError would be raised - presumably valid s-files always have one.
    for line in f:
        if line[79] == '7':
            header = line
            break
    # Continue from the same iterator: collect the 79/80-char phase lines
    # (type ' ', '4' or newline in column 80) that follow the header.
    for line in f:
        if len(line.rstrip('\n').rstrip('\r')) in [80, 79] and \
           line[79] in ' 4\n':
            pickline += [line]
    for line in pickline:
        if line[18:28].strip() == '':  # If line is empty miss it
            continue
        # Column 15 holds the weight; '_' flags a long (7-char) phase name.
        # NOTE(review): when not '_' or ' ', weight stays a one-character
        # string and is assigned to arrival.time_weight below - obspy is
        # presumably coercing it; confirm.
        weight = line[14]
        if weight == '_':
            phase = line[10:17]
            weight = 0
            polarity = ''
        else:
            phase = line[10:14].strip()
            polarity = line[16]
            if weight == ' ':
                weight = 0
        polarity_maps = {"": "undecidable", "C": "positive", "D": "negative"}
        try:
            polarity = polarity_maps[polarity]
        except KeyError:
            polarity = "undecidable"
        # It is valid nordic for the origin to be hour 23 and picks to be hour
        # 00 or 24: this signifies a pick over a day boundary.
        if int(line[18:20]) == 0 and evtime.hour == 23:
            day_add = 86400
            pick_hour = 0
        elif int(line[18:20]) == 24:
            day_add = 86400
            pick_hour = 0
        else:
            day_add = 0
            pick_hour = int(line[18:20])
        try:
            time = UTCDateTime(evtime.year, evtime.month, evtime.day,
                               pick_hour, int(line[20:22]),
                               float(line[23:28])) + day_add
        except ValueError:
            # Seconds field of 60.0+ is not accepted by datetime: rebuild
            # from the fractional part only and add a minute below.
            time = UTCDateTime(evtime.year, evtime.month, evtime.day,
                               int(line[18:20]), pick_hour,
                               float("0." + line[23:38].split('.')[1])) +\
                60 + day_add
            # Add 60 seconds on to the time, this copes with s-file
            # preference to write seconds in 1-60 rather than 0-59 which
            # datetime objects accept
        # Columns 58-60 are labelled by the header: either angle of
        # incidence (AIN) or signal-to-noise ratio (SNR).
        if header[57:60] == 'AIN':
            ain = _float_conv(line[57:60])
            warnings.warn('AIN: %s in header, currently unsupported' % ain)
        elif header[57:60] == 'SNR':
            snr = _float_conv(line[57:60])
        else:
            warnings.warn('%s is not currently supported' % header[57:60])
        # finalweight = _int_conv(line[68:70])
        # Create a new obspy.event.Pick class for this pick
        _waveform_id = WaveformStreamID(station_code=line[1:6].strip(),
                                        channel_code=line[6:8].strip(),
                                        network_code='NA')
        pick = Pick(waveform_id=_waveform_id, phase_hint=phase,
                    polarity=polarity, time=time)
        try:
            pick.onset = onsets[line[9]]
        except KeyError:
            pass
        if line[15] == 'A':
            pick.evaluation_mode = 'automatic'
        else:
            pick.evaluation_mode = 'manual'
        # Note these two are not always filled - velocity conversion not yet
        # implemented, needs to be converted from km/s to s/deg
        # if not velocity == 999.0:
        #     new_event.picks[pick_index].horizontal_slowness = 1.0 / velocity
        if _float_conv(line[46:51]) is not None:
            pick.backazimuth = _float_conv(line[46:51])
        # Create new obspy.event.Amplitude class which references above Pick
        # only if there is an amplitude picked.
        if _float_conv(line[33:40]) is not None:
            _amplitude = Amplitude(generic_amplitude=_float_conv(line[33:40]),
                                   period=_float_conv(line[41:45]),
                                   pick_id=pick.resource_id,
                                   waveform_id=pick.waveform_id)
            if pick.phase_hint == 'IAML':
                # Amplitude for local magnitude
                _amplitude.type = 'AML'
                # Set to be evaluating a point in the trace
                _amplitude.category = 'point'
                # Default AML unit in seisan is nm (Page 139 of seisan
                # documentation, version 10.0)
                _amplitude.generic_amplitude /= 1e9
                _amplitude.unit = 'm'
                _amplitude.magnitude_hint = 'ML'
            else:
                # Generic amplitude type
                _amplitude.type = 'A'
            # NOTE(review): truthiness test here skips snr == 0.0, while the
            # coda branch below uses `is not None` - confirm which is meant.
            if snr:
                _amplitude.snr = snr
            new_event.amplitudes.append(_amplitude)
        elif _int_conv(line[28:33]) is not None:
            # Create an amplitude instance for code duration also
            _amplitude = Amplitude(generic_amplitude=_int_conv(line[28:33]),
                                   pick_id=pick.resource_id,
                                   waveform_id=pick.waveform_id)
            # Amplitude for coda magnitude
            _amplitude.type = 'END'
            # Set to be evaluating a point in the trace
            _amplitude.category = 'duration'
            _amplitude.unit = 's'
            _amplitude.magnitude_hint = 'Mc'
            if snr is not None:
                _amplitude.snr = snr
            new_event.amplitudes.append(_amplitude)
        # Create new obspy.event.Arrival class referencing above Pick
        if _float_conv(line[33:40]) is None:
            arrival = Arrival(phase=pick.phase_hint, pick_id=pick.resource_id)
            if weight is not None:
                arrival.time_weight = weight
            if _int_conv(line[60:63]) is not None:
                arrival.backazimuth_residual = _int_conv(line[60:63])
            if _float_conv(line[63:68]) is not None:
                arrival.time_residual = _float_conv(line[63:68])
            if _float_conv(line[70:75]) is not None:
                arrival.distance = kilometers2degrees(_float_conv(line[70:75]))
            if _int_conv(line[76:79]) is not None:
                arrival.azimuth = _int_conv(line[76:79])
            new_event.origins[0].arrivals.append(arrival)
        new_event.picks.append(pick)
    return new_event
def amp_pick_event(event, st, respdir, chans=None, var_wintype=True,
                   winlen=0.9, pre_pick=0.2, pre_filt=True, lowcut=1.0,
                   highcut=20.0, corners=4, min_snr=1.0, plot=False,
                   remove_old=False, ps_multiplier=0.34, velocity=False):
    """
    Pick amplitudes for local magnitude for a single event.

    Looks for maximum peak-to-trough amplitude for a channel in a stream, and
    picks this amplitude and period.  There are a few things it does
    internally to stabilise the result:

        1. Applies a given filter to the data - very necessary for small
           magnitude earthquakes;

        2. Keeps track of the poles and zeros of this filter and removes them
           from the picked amplitude;

        3. Picks the peak-to-trough amplitude, but records half of this: the
           specification for the local magnitude is to use a peak amplitude
           on a horizontal, however, with modern digital seismometers, the
           peak amplitude often has an additional, DC-shift applied to it,
           to stabilise this, and to remove possible issues with de-meaning
           data recorded during the wave-train of an event (e.g. the mean
           may not be the same as it would be for longer durations), we use
           half the peak-to-trough amplitude;

        4. Despite the original definition of local magnitude requiring the
           use of a horizontal channel, more recent work has shown that the
           vertical channels give more consistent magnitude estimations
           between stations, due to a reduction in site-amplification
           effects, we therefore use the vertical channels by default, but
           allow the user to chose which channels they deem appropriate;

        5. We do not specify that the maximum amplitude should be the
           S-phase: The original definition holds that the maximum body-wave
           amplitude should be used - while this is often the S-phase, we do
           not discriminate against the P-phase.  We do note that, unless
           the user takes care when assigning winlen and filters, they may
           end up with amplitude picks for surface waves;

        6. We use a variable window-length by default that takes into
           account P-S times if available, this is in an effort to include
           only the body waves.  When P-S times are not available we use the
           ps_multiplier variable, which defaults to 0.34 x hypocentral
           distance.

    :type event: obspy.core.event.event.Event
    :param event: Event to pick
    :type st: obspy.core.stream.Stream
    :param st: Stream associated with event
    :type respdir: str
    :param respdir: Path to the response information directory
    :type chans: list
    :param chans: List of the channels to pick on, defaults to ['Z'] - should
        just be the orientations, e.g. Z, 1, 2, N, E
    :type var_wintype: bool
    :param var_wintype: If True, the winlen will be multiplied by the P-S
        time if both P and S picks are available, otherwise it will be
        multiplied by the hypocentral distance*ps_multiplier, defaults
        to True
    :type winlen: float
    :param winlen: Length of window, see above parameter, if var_wintype is
        False then this will be in seconds, otherwise it is the multiplier
        to the p-s time, defaults to 0.9.
    :type pre_pick: float
    :param pre_pick: Time before the s-pick to start the cut window,
        defaults to 0.2.
    :type pre_filt: bool
    :param pre_filt: To apply a pre-filter or not, defaults to True
    :type lowcut: float
    :param lowcut: Lowcut in Hz for the pre-filter, defaults to 1.0
    :type highcut: float
    :param highcut: Highcut in Hz for the pre-filter, defaults to 20.0
    :type corners: int
    :param corners: Number of corners to use in the pre-filter
    :type min_snr: float
    :param min_snr: Minimum signal-to-noise ratio to allow a pick - see note
        below on signal-to-noise ratio calculation.
    :type plot: bool
    :param plot: Turn plotting on or off.
    :type remove_old: bool
    :param remove_old: If True, will remove old amplitude picks from event
        and overwrite with new picks. Defaults to False.
    :type ps_multiplier: float
    :param ps_multiplier: A p-s time multiplier of hypocentral distance -
        defaults to 0.34, based on p-s ratio of 1.68 and an S-velocity 0f
        1.5km/s, deliberately chosen to be quite slow.
    :type velocity: bool
    :param velocity: Whether to make the pick in velocity space or not.
        Original definition of local magnitude used displacement of
        Wood-Anderson, MLv in seiscomp and Antelope uses a velocity
        measurement.

    :returns: Picked event
    :rtype: :class:`obspy.core.event.Event`

    .. Note::
        Signal-to-noise ratio is calculated using the filtered data by
        dividing the maximum amplitude in the signal window (pick window)
        by the normalized noise amplitude (taken from the whole window
        supplied).

    .. Warning::
        Works in place on data - will filter and remove response from data,
        you are recommended to give this function a copy of the data if you
        are using it in a loop.
    """
    # Avoid the mutable-default-argument trap: ['Z'] as a literal default
    # would be shared between calls.
    chans = ['Z'] if chans is None else chans
    # Convert these picks into a lists
    stations = []  # List of stations
    channels = []  # List of channels
    picktimes = []  # List of pick times
    picktypes = []  # List of pick types
    picks_out = []  # P/S picks, parallel to the four lists above
    try:
        depth = _get_origin(event).depth
    except MatchFilterError:
        depth = 0
    if remove_old and event.amplitudes:
        for amp in event.amplitudes:
            # Find the pick and remove it too
            pick = [p for p in event.picks if p.resource_id == amp.pick_id][0]
            event.picks.remove(pick)
        event.amplitudes = []
    for pick in event.picks:
        if pick.phase_hint in ['P', 'S']:
            picks_out.append(pick)  # Need to be able to remove this if there
            # isn't data for a station!
            stations.append(pick.waveform_id.station_code)
            channels.append(pick.waveform_id.channel_code)
            picktimes.append(pick.time)
            picktypes.append(pick.phase_hint)
    if len(picktypes) == 0:
        warnings.warn('No P or S picks found')
    st.merge()  # merge the data, just in case!
    # For each station cut the window
    uniq_stas = list(set(stations))
    for sta in uniq_stas:
        for chan in chans:
            print('Working on ' + sta + ' ' + chan)
            tr = st.select(station=sta, channel='*' + chan)
            if not tr:
                warnings.warn(
                    'There is no station and channel match in the wavefile!')
                continue
            else:
                tr = tr[0]
            # Apply the pre-filter
            if pre_filt:
                try:
                    tr.split().detrend('simple').merge(fill_value=0)
                except Exception:
                    # BUGFIX: was a bare `except:`, which also swallowed
                    # SystemExit/KeyboardInterrupt.
                    print('Some issue splitting this one')
                    dummy = tr.split()
                    dummy.detrend('simple')
                    tr = dummy.merge(fill_value=0)
                try:
                    tr.filter('bandpass', freqmin=lowcut, freqmax=highcut,
                              corners=corners)
                except NotImplementedError:
                    print('For some reason trace is not continuous:')
                    print(tr)
                    continue
            # Find the response information
            resp_info = _find_resp(tr.stats.station, tr.stats.channel,
                                   tr.stats.network, tr.stats.starttime,
                                   tr.stats.delta, respdir)
            PAZ = []
            seedresp = []
            if resp_info and 'gain' in resp_info:
                PAZ = resp_info
            elif resp_info:
                seedresp = resp_info
            # Simulate a Wood Anderson Seismograph
            if PAZ and len(tr.data) > 10:
                # Set ten data points to be the minimum to pass
                tr = _sim_WA(tr, PAZ, None, 10, velocity=velocity)
            elif seedresp and len(tr.data) > 10:
                tr = _sim_WA(tr, None, seedresp, 10, velocity=velocity)
            elif len(tr.data) > 10:
                warnings.warn('No PAZ for ' + tr.stats.station + ' ' +
                              tr.stats.channel + ' at time: ' +
                              str(tr.stats.starttime))
                continue
            # Indices into the parallel pick lists for this station.
            sta_picks = [i for i in range(len(stations))
                         if stations[i] == sta]
            # BUGFIX: sta_picks indexes the P/S-only parallel lists built
            # above (stations/picks_out), NOT event.picks, which may also
            # contain e.g. IAML picks - indexing event.picks here could
            # select the wrong pick.
            pick_id = picks_out[sta_picks[0]].resource_id
            arrival = [arrival for arrival in event.origins[0].arrivals
                       if arrival.pick_id == pick_id][0]
            # Hypocentral distance in km (depth is in m in obspy origins).
            hypo_dist = np.sqrt(
                np.square(degrees2kilometers(arrival.distance)) +
                np.square(depth / 1000))
            if var_wintype and hypo_dist:
                if 'S' in [picktypes[i] for i in sta_picks] and \
                   'P' in [picktypes[i] for i in sta_picks]:
                    # If there is an S-pick we can use this :D
                    s_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'S']
                    s_pick = min(s_pick)
                    p_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'P']
                    p_pick = min(p_pick)
                    try:
                        tr.trim(starttime=s_pick - pre_pick,
                                endtime=s_pick + (s_pick - p_pick) * winlen)
                    except ValueError:
                        continue
                elif 'S' in [picktypes[i] for i in sta_picks]:
                    # S-pick but no P: model the P arrival from distance.
                    s_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'S']
                    s_pick = min(s_pick)
                    p_modelled = s_pick - (hypo_dist * ps_multiplier)
                    try:
                        tr.trim(starttime=s_pick - pre_pick,
                                endtime=s_pick + (s_pick - p_modelled) *
                                winlen)
                    except ValueError:
                        continue
                else:
                    # In this case we only have a P pick
                    p_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'P']
                    p_pick = min(p_pick)
                    s_modelled = p_pick + (hypo_dist * ps_multiplier)
                    print('P_pick=%s' % str(p_pick))
                    print('hypo_dist: %s' % str(hypo_dist))
                    print('S modelled=%s' % str(s_modelled))
                    try:
                        tr.trim(starttime=s_modelled - pre_pick,
                                endtime=s_modelled + (s_modelled - p_pick) *
                                winlen)
                        print(tr)
                    except ValueError:
                        continue
                # Work out the window length based on p-s time or distance
            elif 'S' in [picktypes[i] for i in sta_picks]:
                # If the window is fixed we still need to find the start
                # time, which can be based either on the S-pick (this elif),
                # or on the hypocentral distance and the P-pick.
                # Take the minimum S-pick time if more than one S-pick is
                # available
                s_pick = [picktimes[i] for i in sta_picks
                          if picktypes[i] == 'S']
                s_pick = min(s_pick)
                try:
                    tr.trim(starttime=s_pick - pre_pick,
                            endtime=s_pick + winlen)
                except ValueError:
                    continue
            else:
                # In this case, there is no S-pick and the window length is
                # fixed we need to calculate an expected S_pick based on the
                # hypocentral distance, this will be quite hand-wavey as we
                # are not using any kind of velocity model.
                p_pick = [picktimes[i] for i in sta_picks
                          if picktypes[i] == 'P']
                print(picktimes)
                p_pick = min(p_pick)
                s_modelled = p_pick + hypo_dist * ps_multiplier
                try:
                    tr.trim(starttime=s_modelled - pre_pick,
                            endtime=s_modelled + winlen)
                except ValueError:
                    continue
            if len(tr.data) <= 10:
                warnings.warn('No data found for: ' + tr.stats.station)
                continue
            # Get the amplitude
            try:
                amplitude, period, delay = _max_p2t(tr.data, tr.stats.delta)
            except ValueError:
                print('No amplitude picked for tr %s' % str(tr))
                continue
            # Calculate the normalized noise amplitude
            noise_amplitude = np.sqrt(np.mean(np.square(tr.data)))
            if amplitude == 0.0:
                continue
            if amplitude / noise_amplitude < min_snr:
                print('Signal to noise ratio of %s is below threshold.' %
                      (amplitude / noise_amplitude))
                continue
            if plot:
                plt.plot(np.arange(len(tr.data)), tr.data, 'k')
                plt.scatter(tr.stats.sampling_rate * delay, amplitude / 2)
                plt.scatter(tr.stats.sampling_rate * (delay + period),
                            -amplitude / 2)
                plt.show()
            print('Amplitude picked: ' + str(amplitude))
            print('Signal-to-noise ratio is: %s' %
                  (amplitude / noise_amplitude))
            # Note, amplitude should be in meters at the moment!
            # Remove the pre-filter response
            if pre_filt:
                # Generate poles and zeros for the filter we used earlier:
                # this is how the filter is designed in the convenience
                # methods of filtering in obspy.
                z, p, k = iirfilter(
                    corners, [lowcut / (0.5 * tr.stats.sampling_rate),
                              highcut / (0.5 * tr.stats.sampling_rate)],
                    btype='band', ftype='butter', output='zpk')
                filt_paz = {'poles': list(p), 'zeros': list(z), 'gain': k,
                            'sensitivity': 1.0}
                amplitude /= (paz_2_amplitude_value_of_freq_resp(
                    filt_paz, 1 / period) * filt_paz['sensitivity'])
            if PAZ:
                amplitude /= 1000
            if seedresp:
                # Seedresp method returns mm
                amplitude *= 1000000
            # Write out the half amplitude, approximately the peak amplitude
            # as used directly in magnitude calculations
            amplitude *= 0.5
            # Append an amplitude reading to the event
            _waveform_id = WaveformStreamID(
                station_code=tr.stats.station, channel_code=tr.stats.channel,
                network_code=tr.stats.network)
            pick_ind = len(event.picks)
            event.picks.append(Pick(
                waveform_id=_waveform_id, phase_hint='IAML',
                polarity='undecidable', time=tr.stats.starttime + delay,
                evaluation_mode='automatic'))
            if not velocity:
                event.amplitudes.append(Amplitude(
                    generic_amplitude=amplitude / 1e9, period=period,
                    pick_id=event.picks[pick_ind].resource_id,
                    waveform_id=event.picks[pick_ind].waveform_id, unit='m',
                    magnitude_hint='ML', type='AML', category='point'))
            else:
                event.amplitudes.append(Amplitude(
                    generic_amplitude=amplitude / 1e9, period=period,
                    pick_id=event.picks[pick_ind].resource_id,
                    waveform_id=event.picks[pick_ind].waveform_id,
                    unit='m/s', magnitude_hint='ML', type='AML',
                    category='point'))
    return event
def __toAmplitude(parser, el):
    """
    Build an obspy Amplitude object from an amplitude XML element.

    Only obspyck-produced amplitudes are handled; any other source raises.
    Returns ``None`` when the element carries no generic amplitude value.
    """
    amplitude = Amplitude()
    amplitude.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "amplitude"]))
    if CURRENT_TYPE != "obspyck":
        raise Exception("encountered non-obspyck amplitude!")
    amplitude.method_id = "%s/amplitude_method/obspyck/1" % RESOURCE_ROOT
    amplitude.generic_amplitude, amplitude.generic_amplitude_errors = \
        __toFloatQuantity(parser, el, "genericAmplitude")
    amplitude.unit = parser.xpath2obj('unit', el, str)
    wav = el.xpath("waveform")[0]
    net = wav.get("networkCode")
    sta = fix_station_name(wav.get("stationCode"))
    # Map some station names.
    sta = STATION_DICT.get(sta, sta)
    if not net:
        net = NETWORK_DICT[sta]
    amplitude.waveform_id = WaveformStreamID(
        network_code=net,
        station_code=sta,
        channel_code=wav.get("channelCode") or "",
        location_code=wav.get("locationCode") or "")
    # Amplitudes without generic_amplitude are not quakeml conform
    if amplitude.generic_amplitude is None:
        print(
            "Amplitude has no generic_amplitude value and is "
            "ignored: %s" % sta)
        return None
    # generate time_window
    window = TimeWindow()
    start = parser.xpath2obj('timeWindow/reference', el, UTCDateTime)
    stop = start + parser.xpath2obj('timeWindow/end', el, float)
    span = abs(stop - start)
    window.reference = start
    if start < stop:
        window.begin, window.end = 0.0, span
    else:
        window.begin, window.end = span, 0.0
    amplitude.time_window = window
    # we have so far saved frequency in Hz as "period" tag
    # use two times dt instead
    ##amp.period = 1.0 / parser.xpath2obj('period', el, float)
    amplitude.period = 2.0 * span
    note = ("PGV; reference time is time of minimum amplitude. if minimum "
            "comes before maximum, start is 0 and end is relative time of "
            "maximum. otherwise end is 0, start is relative time of minimum.")
    amplitude.comments.append(
        Comment(force_resource_id=False, resource_id=None, text=note))
    return amplitude
def amp_pick_event(event, st, inventory, chans=['Z'], var_wintype=True,
                   winlen=0.9, pre_pick=0.2, pre_filt=True, lowcut=1.0,
                   highcut=20.0, corners=4, min_snr=1.0, plot=False,
                   remove_old=False, ps_multiplier=0.34, velocity=False,
                   water_level=0):
    """
    Pick amplitudes for local magnitude for a single event.

    Looks for maximum peak-to-trough amplitude for a channel in a stream, and
    picks this amplitude and period.  There are a few things it does
    internally to stabilise the result:

        1. Applies a given filter to the data using obspy's bandpass filter.
           The filter applied is a time-domain digital SOS filter.
           This is often necessary for small magnitude earthquakes.  To
           correct for this filter later the gain of the filter at the period
           of the maximum amplitude is retrieved using scipy's sosfreqz,
           and used to divide the resulting picked amplitude.

        2. Picks the peak-to-trough amplitude, but records half of this to
           cope with possible DC offsets.

        3. Despite the original definition of local magnitude requiring the
           use of a horizontal channel, more recent work has shown that the
           vertical channels give more consistent magnitude estimations
           between stations, due to a reduction in site-amplification
           effects, we therefore use the vertical channels by default, but
           allow the user to chose which channels they deem appropriate;

        4. The maximum amplitude within the given window is picked. Care must
           be taken to avoid including surface waves in the window;

        5. A variable window-length is used by default that takes into
           account P-S times if available, this is in an effort to include
           only the body waves.  When P-S times are not available the
           ps_multiplier variable is used, which defaults to 0.34 x
           hypocentral distance.

    :type event: obspy.core.event.event.Event
    :param event: Event to pick
    :type st: obspy.core.stream.Stream
    :param st: Stream associated with event
    :type inventory: obspy.core.inventory.Inventory
    :param inventory:
        Inventory containing response information for the stations in st.
    :type chans: list
    :param chans:
        List of the channels to pick on, defaults to ['Z'] - should just be
        the orientations, e.g. Z, 1, 2, N, E
    :type var_wintype: bool
    :param var_wintype:
        If True, the winlen will be multiplied by the P-S time if both P and
        S picks are available, otherwise it will be multiplied by the
        hypocentral distance*ps_multiplier, defaults to True
    :type winlen: float
    :param winlen:
        Length of window, see above parameter, if var_wintype is False then
        this will be in seconds, otherwise it is the multiplier to the
        p-s time, defaults to 0.9.
    :type pre_pick: float
    :param pre_pick:
        Time before the s-pick to start the cut window, defaults to 0.2.
    :type pre_filt: bool
    :param pre_filt: To apply a pre-filter or not, defaults to True
    :type lowcut: float
    :param lowcut: Lowcut in Hz for the pre-filter, defaults to 1.0
    :type highcut: float
    :param highcut: Highcut in Hz for the pre-filter, defaults to 20.0
    :type corners: int
    :param corners: Number of corners to use in the pre-filter
    :type min_snr: float
    :param min_snr:
        Minimum signal-to-noise ratio to allow a pick - see note below on
        signal-to-noise ratio calculation.
    :type plot: bool
    :param plot: Turn plotting on or off.
    :type remove_old: bool
    :param remove_old:
        If True, will remove old amplitudes and associated picks from event
        and overwrite with new picks. Defaults to False.
    :type ps_multiplier: float
    :param ps_multiplier:
        A p-s time multiplier of hypocentral distance - defaults to 0.34,
        based on p-s ratio of 1.68 and an S-velocity of 1.5km/s, deliberately
        chosen to be quite slow.
    :type velocity: bool
    :param velocity:
        Whether to make the pick in velocity space or not. Original
        definition of local magnitude used displacement of Wood-Anderson,
        MLv in seiscomp and Antelope uses a velocity measurement.
    :type water_level: float
    :param water_level: Water-level for seismometer simulation.

    :returns: Picked event
    :rtype: :class:`obspy.core.event.Event`

    .. note::
        Signal-to-noise ratio is calculated using the filtered data by
        dividing the maximum amplitude in the signal window (pick window)
        by the normalized noise amplitude (taken from the whole window
        supplied).
    """
    try:
        event_origin = event.preferred_origin() or event.origins[0]
    except IndexError:
        # Event has no origins at all - use an empty one (depth -> 0 below).
        event_origin = Origin()
    depth = event_origin.depth
    if depth is None:
        Logger.warning("No depth for the event, setting to 0 km")
        depth = 0

    # Remove amplitudes and picks for those amplitudes - this is not always
    # safe: picks may not be exclusively linked to amplitudes - hence the
    # default is *not* to do this.
    if remove_old and event.amplitudes:
        removal_ids = {amp.pick_id for amp in event.amplitudes}
        event.picks = [
            p for p in event.picks if p.resource_id not in removal_ids]
        event.amplitudes = []

    # We just want to look at P and S picks.
    picks = [p for p in event.picks
             if p.phase_hint and p.phase_hint[0].upper() in ("P", "S")]
    if len(picks) == 0:
        Logger.warning('No P or S picks found')
        return event

    st = st.copy().merge()  # merge the data, just in case! Work on a copy.
    # For each station cut the window
    for sta in {p.waveform_id.station_code for p in picks}:
        for chan in chans:
            Logger.info(f'Working on {sta} {chan}')
            tr = st.select(station=sta, component=chan)
            if not tr:
                Logger.warning(f'{sta} {chan} not found in the stream.')
                continue
            tr = tr.merge()[0]
            # Apply the pre-filter
            if pre_filt:
                # Gap-safe filtering: split at gaps, detrend, zero-fill.
                tr = tr.split().detrend('simple').merge(fill_value=0)[0]
                tr.filter('bandpass', freqmin=lowcut, freqmax=highcut,
                          corners=corners)
            tr = _sim_WA(tr, inventory, water_level=water_level,
                         velocity=velocity)
            if tr is None:  # None returned when no matching response is found
                continue

            # Get the distance from an appropriate arrival
            sta_picks = [p for p in picks
                         if p.waveform_id.station_code == sta]
            distances = []
            for pick in sta_picks:
                distances += [
                    a.distance for a in event_origin.arrivals
                    if a.pick_id == pick.resource_id and
                    a.distance is not None]
            if len(distances) == 0:
                Logger.error(f"Arrivals for station: {sta} do not contain "
                             "distances. Have you located this event?")
                hypo_dist = None
            else:
                # They should all be the same, but take the mean to be sure...
                distance = np.mean(distances)
                # Epicentral (deg->km) plus depth (m->km) -> hypocentral km.
                hypo_dist = np.sqrt(
                    np.square(degrees2kilometers(distance)) +
                    np.square(depth / 1000))

            # Get the earliest P and S picks on this station
            phase_picks = {"P": None, "S": None}
            for _hint in phase_picks.keys():
                _picks = sorted(
                    [p for p in sta_picks
                     if p.phase_hint[0].upper() == _hint],
                    key=lambda p: p.time)
                if len(_picks) > 0:
                    phase_picks[_hint] = _picks[0]
            p_pick = phase_picks["P"]
            s_pick = phase_picks["S"]
            # Get the window size.
            if var_wintype:
                # Work out the window length based on p-s time or distance
                if p_pick and s_pick:
                    p_time, s_time = p_pick.time, s_pick.time
                elif s_pick and hypo_dist:
                    # Model a P-time from the S-pick and distance.
                    s_time = s_pick.time
                    p_time = s_time - (hypo_dist * ps_multiplier)
                elif p_pick and hypo_dist:
                    # Model an S-time from the P-pick and distance.
                    p_time = p_pick.time
                    s_time = p_time + (hypo_dist * ps_multiplier)
                elif (s_pick or p_pick) and hypo_dist is None:
                    Logger.error(
                        "No hypocentral distance and no matching P and S "
                        f"picks for {sta}, skipping.")
                    continue
                else:
                    raise NotImplementedError(
                        "No p or s picks - you should not have been able to "
                        "get here")
                trim_start = s_time - pre_pick
                trim_end = s_time + (s_time - p_time) * winlen
            else:  # Fixed window-length
                if s_pick:
                    s_time = s_pick.time
                elif p_pick and hypo_dist:
                    # In this case, there is no S-pick and the window length
                    # is fixed we need to calculate an expected S_pick based
                    # on the hypocentral distance, this will be quite
                    # hand-wavey as we are not using any kind of velocity
                    # model.
                    s_time = p_pick.time + hypo_dist * ps_multiplier
                else:
                    Logger.warning(
                        "No s-pick or hypocentral distance to predict "
                        f"s-arrival for station {sta}, skipping")
                    continue
                trim_start = s_time - pre_pick
                trim_end = s_time + winlen
            tr = tr.trim(trim_start, trim_end)
            if len(tr.data) <= 10:
                Logger.warning(f'Insufficient data for {sta}')
                continue
            # Get the amplitude
            try:
                amplitude, period, delay, peak, trough = _max_p2t(
                    tr.data, tr.stats.delta, return_peak_trough=True)
            except ValueError as e:
                Logger.error(e)
                Logger.error(f'No amplitude picked for tr {tr.id}')
                continue
            # Calculate the normalized noise amplitude
            snr = amplitude / np.sqrt(np.mean(np.square(tr.data)))
            if amplitude == 0.0:
                continue
            if snr < min_snr:
                Logger.info(
                    f'Signal to noise ratio of {snr} is below threshold.')
                continue
            if plot:
                plt.plot(np.arange(len(tr.data)), tr.data, 'k')
                plt.scatter(tr.stats.sampling_rate * delay, peak)
                plt.scatter(tr.stats.sampling_rate * (delay + period / 2),
                            trough)
                plt.show()
            Logger.info(f'Amplitude picked: {amplitude}')
            Logger.info(f'Signal-to-noise ratio is: {snr}')
            # Note, amplitude should be in meters at the moment!
            # Remove the pre-filter response
            if pre_filt:
                # Generate poles and zeros for the filter we used earlier.
                # We need to get the gain for the digital SOS filter used by
                # obspy.
                sos = iirfilter(
                    corners, [lowcut / (0.5 * tr.stats.sampling_rate),
                              highcut / (0.5 * tr.stats.sampling_rate)],
                    btype='band', ftype='butter', output='sos')
                # Evaluate the filter gain at the picked period (1/period Hz).
                _, gain = sosfreqz(sos, worN=[1 / period],
                                   fs=tr.stats.sampling_rate)
                gain = np.abs(gain[0])  # Convert from complex to real.
                amplitude /= gain
                Logger.debug(f"Removed filter gain: {gain}")
            # Write out the half amplitude, approximately the peak amplitude
            # as used directly in magnitude calculations
            amplitude *= 0.5
            # Append an amplitude reading to the event
            _waveform_id = WaveformStreamID(station_code=tr.stats.station,
                                            channel_code=tr.stats.channel,
                                            network_code=tr.stats.network)
            pick = Pick(waveform_id=_waveform_id, phase_hint='IAML',
                        polarity='undecidable',
                        time=tr.stats.starttime + delay,
                        evaluation_mode='automatic')
            event.picks.append(pick)
            if not velocity:
                event.amplitudes.append(
                    Amplitude(generic_amplitude=amplitude, period=period,
                              pick_id=pick.resource_id,
                              waveform_id=pick.waveform_id, unit='m',
                              magnitude_hint='ML', type='AML',
                              category='point'))
            else:
                event.amplitudes.append(
                    Amplitude(generic_amplitude=amplitude, period=period,
                              pick_id=pick.resource_id,
                              waveform_id=pick.waveform_id, unit='m/s',
                              magnitude_hint='ML', type='AML',
                              category='point'))
    return event
def basic_test_event():
    """
    Build a minimal but fully-populated test event.

    The returned event carries one origin, an event description, three
    magnitudes (ML, Mc and Ms - all referencing the origin), a single
    P-phase pick with an associated amplitude reading, and one arrival
    linked to that pick.

    :returns: :class:`obspy.core.event.Event`
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    from obspy import UTCDateTime

    base_time = UTCDateTime("2012-03-26")
    event = Event()
    event.creation_info = CreationInfo(agency_id='TES')
    # Origin with basic location information.
    origin = Origin()
    origin.time = base_time + 1
    origin.latitude = 45.0
    origin.longitude = 25.0
    origin.depth = 15000
    origin.time_errors['Time_Residual_RMS'] = 0.01
    event.origins.append(origin)
    # Event description.
    description = EventDescription()
    description.text = 'LE'
    event.event_descriptions.append(description)
    # Three magnitudes, all referencing the same origin.
    for value, mag_type in ((0.1, 'ML'), (0.5, 'Mc'), (1.3, 'Ms')):
        magnitude = Magnitude()
        magnitude.mag = value
        magnitude.magnitude_type = mag_type
        magnitude.creation_info = CreationInfo('TES')
        magnitude.origin_id = origin.resource_id
        event.magnitudes.append(magnitude)
    # Define the test pick.
    waveform_id = WaveformStreamID(station_code='FOZ', channel_code='SHZ',
                                   network_code='NZ')
    pick = Pick(waveform_id=waveform_id, onset='impulsive',
                phase_hint='PN', polarity='positive',
                time=base_time + 1.68, horizontal_slowness=12,
                backazimuth=20)
    event.picks.append(pick)
    # Amplitude reading associated with the pick.
    event.amplitudes.append(
        Amplitude(generic_amplitude=2.0, period=0.4,
                  pick_id=pick.resource_id, waveform_id=pick.waveform_id,
                  unit='m'))
    # Arrival linking the pick into the origin.
    origin.arrivals.append(
        Arrival(time_weight=2, phase=pick.phase_hint,
                pick_id=pick.resource_id, backazimuth_residual=5,
                time_residual=0.2, distance=15, azimuth=25))
    return event
def readpicks(sfile):
    """
    Read all pick information from the s-file to an obspy.event.Catalog type.

    .. note:: This was changed for version 0.1.0 from using the inbuilt
        PICK class.

    :type sfile: str
    :param sfile: Path to sfile

    :return: obspy.core.event.Event

    .. warning:: Currently finalweight is unsupported, nor is velocity,
        or angle of incidence.  This is because obspy.event stores slowness
        in s/deg and takeoff angle, which would require computation from the
        values stored in seisan.  Multiple weights are also not supported in
        Obspy.event.

    .. rubric:: Example

    >>> event = readpicks('eqcorrscan/tests/test_data/REA/TEST_/' +
    ...                   '01-0411-15L.S201309')
    >>> print(event.origins[0].time)
    2013-09-01T04:11:15.700000Z
    >>> print(event.picks[0].time)
    2013-09-01T04:11:17.240000Z
    """
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    # Get wavefile name for use in resource_ids
    wav_names = readwavename(sfile)
    # First we need to read the header to get the timing info
    new_event = readheader(sfile)
    evtime = new_event.origins[0].time
    f = open(sfile, 'r')
    pickline = []
    # Set a default, ignored later unless overwritten
    SNR = 999
    # locals() hack: make sure 'headerend' is undefined before the loop so
    # the membership test below acts as a "header seen yet?" flag.
    if 'headerend' in locals():
        del headerend
    for lineno, line in enumerate(f):
        if 'headerend' in locals():
            # Past the header: keep type-blank or type-4 phase lines.
            if len(line.rstrip('\n').rstrip('\r')) in [80, 79] and \
               (line[79] == ' ' or line[79] == '4' or line[79] == '\n'):
                pickline += [line]
        elif line[79] == '7':
            # Line type 7 is the column-header line that precedes picks.
            # NOTE(review): if no type-7 line exists, 'header' stays unbound
            # and the 'header[57:60]' reads below raise NameError - confirm
            # all expected s-files carry a type-7 line.
            header = line
            headerend = lineno
    amplitude_index = 0
    for pick_index, line in enumerate(pickline):
        if line[18:28].strip() == '':  # If line is empty miss it
            continue
        # Fixed-column Seisan phase-line format (see Seisan manual).
        station = line[1:6].strip()
        channel = line[6:8].strip()
        network = 'NA'  # No network information provided in Sfile.
        weight = line[14]
        if weight == '_':
            # Long phase name occupies the weight/polarity columns.
            phase = line[10:17]
            weight = 0
            polarity = ''
        else:
            phase = line[10:14].strip()
            polarity = line[16]
            if weight == ' ':
                weight = 0
        # Map Seisan polarity codes to QuakeML vocabulary.
        if polarity == '':
            polarity = "undecidable"
        elif polarity == 'C':
            polarity = "positive"
        elif polarity == 'D':
            polarity = 'negative'
        else:
            polarity = "undecidable"
        # Seisan may write hour 24 for picks on the following day.
        if int(line[18:20]) == 24:
            pickhr = 0
            pickday = evtime + 86400
        else:
            pickhr = int(line[18:20])
            pickday = evtime
        try:
            time = UTCDateTime(pickday.year, pickday.month, pickday.day,
                               pickhr, int(line[20:22]),
                               int(line[23:28].split('.')[0]),
                               int(line[23:28].split('.')[1]) * 10000)
        except ValueError:
            time = UTCDateTime(evtime.year, evtime.month, evtime.day,
                               pickhr, int(line[20:22]), 0, 0)
            time += 60
            # Add 60 seconds on to the time, this copes with s-file
            # preference to write seconds in 1-60 rather than 0-59 which
            # datetime objects accept
        coda = _int_conv(line[28:33])
        amplitude = _float_conv(line[33:40])
        peri = _float_conv(line[41:45])
        azimuth = _float_conv(line[46:51])
        velocity = _float_conv(line[52:56])
        # Columns 57-60 hold either angle-of-incidence or SNR; the header
        # line tells us which.
        # NOTE(review): AIN is read but never used afterwards - confirm this
        # is intentional (angle of incidence is documented as unsupported).
        if header[57:60] == 'AIN':
            AIN = _float_conv(line[57:60])
        elif header[57:60] == 'SNR':
            SNR = _float_conv(line[57:60])
        azimuthres = _int_conv(line[60:63])
        timeres = _float_conv(line[63:68])
        finalweight = _int_conv(line[68:70])
        distance = kilometer2degrees(_float_conv(line[70:75]))
        CAZ = _int_conv(line[76:79])
        # Create a new obspy.event.Pick class for this pick
        _waveform_id = WaveformStreamID(station_code=station,
                                        channel_code=channel,
                                        network_code=network)
        new_event.picks.append(
            Pick(waveform_id=_waveform_id, phase_hint=phase,
                 polarity=polarity, time=time))
        if line[9] == 'I':
            new_event.picks[pick_index].onset = 'impulsive'
        elif line[9] == 'E':
            new_event.picks[pick_index].onset = 'emergent'
        if line[15] == 'A':
            new_event.picks[pick_index].evaluation_mode = 'automatic'
        else:
            new_event.picks[pick_index].evaluation_mode = 'manual'
        # Note these two are not always filled - velocity conversion not yet
        # implimented, needs to be converted from km/s to s/deg
        # if not velocity == 999.0:
        #     new_event.picks[pick_index].horizontal_slowness = 1.0 / velocity
        if not azimuth == 999:
            new_event.picks[pick_index].backazimuth = azimuth
        del _waveform_id
        # Create new obspy.event.Amplitude class which references above Pick
        # only if there is an amplitude picked.  999 is the sentinel for
        # "missing" throughout the Seisan format.
        if not amplitude == 999.0:
            new_event.amplitudes.append(
                Amplitude(generic_amplitude=amplitude, period=peri,
                          pick_id=new_event.picks[pick_index].resource_id,
                          waveform_id=new_event.picks[
                              pick_index].waveform_id))
            if new_event.picks[pick_index].phase_hint == 'IAML':
                # Amplitude for local magnitude
                new_event.amplitudes[amplitude_index].type = 'AML'
                # Set to be evaluating a point in the trace
                new_event.amplitudes[amplitude_index].category = 'point'
                # Default AML unit in seisan is nm (Page 139 of seisan
                # documentation, version 10.0)
                new_event.amplitudes[amplitude_index].generic_amplitude /=\
                    10**9
                new_event.amplitudes[amplitude_index].unit = 'm'
                new_event.amplitudes[amplitude_index].magnitude_hint = 'ML'
            else:
                # Generic amplitude type
                new_event.amplitudes[amplitude_index].type = 'A'
            if not SNR == 999.0:
                new_event.amplitudes[amplitude_index].snr = SNR
            amplitude_index += 1
        elif not coda == 999:
            # Create an amplitude instance for code duration also
            new_event.amplitudes.append(
                Amplitude(generic_amplitude=coda,
                          pick_id=new_event.picks[pick_index].resource_id,
                          waveform_id=new_event.picks[
                              pick_index].waveform_id))
            # Amplitude for coda magnitude
            new_event.amplitudes[amplitude_index].type = 'END'
            # Set to be evaluating a point in the trace
            new_event.amplitudes[amplitude_index].category = 'duration'
            new_event.amplitudes[amplitude_index].unit = 's'
            new_event.amplitudes[amplitude_index].magnitude_hint = 'Mc'
            if SNR and not SNR == 999.0:
                new_event.amplitudes[amplitude_index].snr = SNR
            amplitude_index += 1
        # Create new obspy.event.Arrival class referencing above Pick
        new_event.origins[0].arrivals.append(
            Arrival(phase=new_event.picks[pick_index].phase_hint,
                    pick_id=new_event.picks[pick_index].resource_id))
        if weight != 999:
            new_event.origins[0].arrivals[pick_index].time_weight =\
                weight
        if azimuthres != 999:
            new_event.origins[0].arrivals[pick_index].backazimuth_residual =\
                azimuthres
        if timeres != 999:
            new_event.origins[0].arrivals[pick_index].time_residual =\
                timeres
        if distance != 999:
            new_event.origins[0].arrivals[pick_index].distance =\
                distance
        if CAZ != 999:
            new_event.origins[0].arrivals[pick_index].azimuth =\
                CAZ
    f.close()
    # Write event to catalog object for ease of .write() method
    return new_event
def process(self, event): print("Finished picking.") for trace_id, picker in self.p_picks.items(): if picker.time is not None: if picker.polarity == "up": polarity = "positive" elif picker.polarity == "down": polarity = "negative" else: polarity = "undecidable" self.event_out.picks.append(Pick( phase_hint="P", time=picker.time, waveform_id=WaveformStreamID(seed_string=trace_id), evaluation_mode="manual", polarity=polarity, creation_info=CreationInfo(author=getpass.getuser()))) for trace_id, picker in self.s_picks.items(): if picker.time is not None: self.event_out.picks.append(Pick( phase_hint="S", time=picker.time, waveform_id=WaveformStreamID(seed_string=trace_id), evaluation_mode="manual", creation_info=CreationInfo(author=getpass.getuser()))) for trace_id, picker in self.amplitude_picks.items(): if picker.time is not None: amp_pick = Pick( time=picker.time, phase_hint="IAML", waveform_id=WaveformStreamID(seed_string=trace_id), evaluation_mode="manual", creation_info=CreationInfo(author=getpass.getuser())) self.event_out.picks.append(amp_pick) self.event_out.amplitudes.append(Amplitude( generic_amplitude=picker.amplitude, type="A", pick_id=amp_pick.resource_id, waveform_id=WaveformStreamID(seed_string=trace_id), evaluation_mode="manual", creation_info=CreationInfo(author=getpass.getuser()))) for trace_id, picker in self.duration_picks.items(): if picker.time is None: continue # Get linked P pick duration_start = [ p for p in self.event_out.picks if p.phase_hint == "P" and p.waveform_id.get_seed_string() == trace_id] if len(duration_start) == 0: print("No matching P for duration on {0}".format(trace_id)) continue duration_start = sorted([p.time for p in duration_start])[0] print("Duration: {0:.4f}s".format(picker.time - duration_start)) duration_pick = Pick( time=picker.time, phase_hint="END", waveform_id=WaveformStreamID(seed_string=trace_id), evaluation_mode="manual", creation_info=CreationInfo(author=getpass.getuser())) 
self.event_out.picks.append(duration_pick) self.event_out.amplitudes.append(Amplitude( generic_amplitude=picker.time - duration_start, type="END", pick_id=duration_pick.resource_id, waveform_id=WaveformStreamID(seed_string=trace_id), evaluation_mode="manual", creation_info=CreationInfo(author=getpass.getuser()))) print("Finished processing event. Returning") return
def amp_pick_event(event, st, respdir, chans=['Z'], var_wintype=True,
                   winlen=0.9, pre_pick=0.2, pre_filt=True, lowcut=1.0,
                   highcut=20.0, corners=4):
    """
    Pick amplitudes for local magnitude for a single event.

    Looks for maximum peak-to-trough amplitude for a channel in a stream,
    and picks this amplitude and period.  There are a few things it does
    internally to stabilise the result:

        1. Applies a given filter to the data - very necessary for small
           magnitude earthquakes;

        2. Keeps track of the poles and zeros of this filter and removes
           them from the picked amplitude;

        3. Picks the peak-to-trough amplitude, but records half of this:
           the specification for the local magnitude is to use a peak
           amplitude on a horizontal, however, with modern digital
           seismometers, the peak amplitude often has an additional,
           DC-shift applied to it, to stabilise this, and to remove
           possible issues with de-meaning data recorded during the
           wave-train of an event (e.g. the mean may not be the same as it
           would be for longer durations), we use half the peak-to-trough
           amplitude;

        4. Despite the original definition of local magnitude requiring
           the use of a horizontal channel, more recent work has shown
           that the vertical channels give more consistent magnitude
           estimations between stations, due to a reduction in
           site-amplification effects, we therefore use the vertical
           channels by default, but allow the user to chose which channels
           they deem appropriate;

        5. We do not specify that the maximum amplitude should be the
           S-phase: The original definition holds that the maximum
           body-wave amplitude should be used - while this is often the
           S-phase, we do not discrimiate against the P-phase.  We do note
           that, unless the user takes care when assigning winlen and
           filters, they may end up with amplitude picks for surface
           waves;

        6. We use a variable window-length by default that takes into
           account P-S times if available, this is in an effort to include
           only the body waves.  When P-S times are not available we
           hard-wire a P-S at 0.34 x hypocentral distance.

    :type event: obspy.core.event.Event
    :param event: Event to pick
    :type st: obspy.core.Stream
    :param st: Stream associated with event
    :type respdir: str
    :param respdir: Path to the response information directory
    :type chans: list
    :param chans:
        List of the channels to pick on, defaults to ['Z'] - should just
        be the orientations, e.g. Z,1,2,N,E
    :type var_wintype: bool
    :param var_wintype:
        If True, the winlen will be multiplied by the P-S time if both P
        and S picks are available, otherwise it will be multiplied by the
        hypocentral distance*0.34 - derived using a p-s ratio of 1.68 and
        S-velocity of 1.5km/s to give a large window, defaults to True
    :type winlen: float
    :param winlen:
        Length of window, see above parameter, if var_wintype is False
        then this will be in seconds, otherwise it is the multiplier to
        the p-s time, defaults to 0.9.
    :type pre_pick: float
    :param pre_pick:
        Time before the s-pick to start the cut window, defaults to 0.2
    :type pre_filt: bool
    :param pre_filt: To apply a pre-filter or not, defaults to True
    :type lowcut: float
    :param lowcut: Lowcut in Hz for the pre-filter, defaults to 1.0
    :type highcut: float
    :param highcut: Highcut in Hz for the pre-filter, defaults to 20.0
    :type corners: int
    :param corners: Number of corners to use in the pre-filter

    :returns: obspy.core.event
    """
    # Hardwire a p-s multiplier of hypocentral distance based on p-s ratio of
    # 1.68 and an S-velocity 0f 1.5km/s, deliberately chosen to be quite slow
    ps_multiplier = 0.34
    from obspy import read
    from scipy.signal import iirfilter
    from obspy.signal.invsim import paz_2_amplitude_value_of_freq_resp
    import warnings
    from obspy.core.event import Amplitude, Pick, WaveformStreamID
    # Convert these picks into a lists
    stations = []  # List of stations
    channels = []  # List of channels
    picktimes = []  # List of pick times
    picktypes = []  # List of pick types
    distances = []  # List of hypocentral distances
    picks_out = []
    for pick in event.picks:
        if pick.phase_hint in ['P', 'S']:
            picks_out.append(pick)  # Need to be able to remove this if there
            # isn't data for a station!
            stations.append(pick.waveform_id.station_code)
            channels.append(pick.waveform_id.channel_code)
            picktimes.append(pick.time)
            picktypes.append(pick.phase_hint)
            # Assumes every P/S pick has exactly one matching arrival;
            # IndexError here means the event is not located.
            arrival = [
                arrival for arrival in event.origins[0].arrivals
                if arrival.pick_id == pick.resource_id][0]
            distances.append(arrival.distance)
    st.merge()  # merge the data, just in case!
    # For each station cut the window
    uniq_stas = list(set(stations))
    # NOTE(review): if the event has no P or S picks, 'arrival' is unbound
    # here and this raises NameError - confirm callers always pass picked
    # events.
    del (arrival)
    for sta in uniq_stas:
        for chan in chans:
            print('Working on ' + sta + ' ' + chan)
            tr = st.select(station=sta, channel='*' + chan)
            if not tr:
                # Remove picks from file
                # picks_out=[picks_out[i] for i in range(len(picks))\
                #           if picks_out[i].station+picks_out[i].channel != \
                #           sta+chan]
                warnings.warn('There is no station and channel match in the '
                              + 'wavefile!')
                break
            else:
                tr = tr[0]
            # Apply the pre-filter
            if pre_filt:
                try:
                    tr.detrend('simple')
                # NOTE(review): bare except - this will also swallow
                # KeyboardInterrupt/SystemExit; should be narrowed to the
                # specific exception detrend raises on gappy data.
                except:
                    # Gappy trace: split at gaps, detrend each piece, merge.
                    dummy = tr.split()
                    dummy.detrend('simple')
                    tr = dummy.merge()[0]
                tr.filter('bandpass', freqmin=lowcut, freqmax=highcut,
                          corners=corners)
            # Indices (into the parallel lists above) of picks on this sta.
            sta_picks = [i for i in range(len(stations))
                         if stations[i] == sta]
            pick_id = event.picks[sta_picks[0]].resource_id
            arrival = [
                arrival for arrival in event.origins[0].arrivals
                if arrival.pick_id == pick_id][0]
            hypo_dist = arrival.distance
            CAZ = arrival.azimuth
            if var_wintype:
                # Work out the window length based on p-s time or distance
                if 'S' in [picktypes[i] for i in sta_picks] and\
                   'P' in [picktypes[i] for i in sta_picks]:
                    # If there is an S-pick we can use this :D
                    S_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'S']
                    S_pick = min(S_pick)
                    P_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'P']
                    P_pick = min(P_pick)
                    try:
                        tr.trim(starttime=S_pick - pre_pick,
                                endtime=S_pick + (S_pick - P_pick) * winlen)
                    except ValueError:
                        break
                elif 'S' in [picktypes[i] for i in sta_picks]:
                    # Only an S-pick: model the P arrival from distance.
                    S_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'S']
                    S_pick = min(S_pick)
                    P_modelled = S_pick - hypo_dist * ps_multiplier
                    try:
                        tr.trim(starttime=S_pick - pre_pick,
                                endtime=S_pick +
                                (S_pick - P_modelled) * winlen)
                    except ValueError:
                        break
                else:
                    # In this case we only have a P pick
                    P_pick = [picktimes[i] for i in sta_picks
                              if picktypes[i] == 'P']
                    P_pick = min(P_pick)
                    S_modelled = P_pick + hypo_dist * ps_multiplier
                    try:
                        tr.trim(starttime=S_modelled - pre_pick,
                                endtime=S_modelled +
                                (S_modelled - P_pick) * winlen)
                    except ValueError:
                        break
            elif 'S' in [picktypes[i] for i in sta_picks]:
                # If the window is fixed we still need to find the start time,
                # which can be based either on the S-pick (this elif), or
                # on the hypocentral distance and the P-pick
                # Take the minimum S-pick time if more than one S-pick is
                # available
                S_pick = [picktimes[i] for i in sta_picks
                          if picktypes[i] == 'S']
                S_pick = min(S_pick)
                try:
                    tr.trim(starttime=S_pick - pre_pick,
                            endtime=S_pick + winlen)
                except ValueError:
                    break
            else:
                # In this case, there is no S-pick and the window length is
                # fixed we need to calculate an expected S_pick based on the
                # hypocentral distance, this will be quite hand-wavey as we
                # are not using any kind of velocity model.
                P_pick = [picktimes[i] for i in sta_picks
                          if picktypes[i] == 'P']
                P_pick = min(P_pick)
                hypo_dist = [distances[i] for i in sta_picks
                             if picktypes[i] == 'P'][0]
                S_modelled = P_pick + hypo_dist * ps_multiplier
                try:
                    tr.trim(starttime=S_modelled - pre_pick,
                            endtime=S_modelled + winlen)
                except ValueError:
                    break
            # Find the response information
            resp_info = _find_resp(tr.stats.station, tr.stats.channel,
                                   tr.stats.network, tr.stats.starttime,
                                   tr.stats.delta, respdir)
            PAZ = []
            seedresp = []
            # _find_resp returns either a poles-and-zeros dict (has 'gain')
            # or a SEED RESP description.
            if resp_info and 'gain' in resp_info:
                PAZ = resp_info
            elif resp_info:
                seedresp = resp_info
            # Simulate a Wood Anderson Seismograph
            if PAZ and len(tr.data) > 10:
                # Set ten data points to be the minimum to pass
                tr = _sim_WA(tr, PAZ, None, 10)
            elif seedresp and len(tr.data) > 10:
                tr = _sim_WA(tr, None, seedresp, 10)
            elif len(tr.data) > 10:
                warnings.warn('No PAZ for ' + tr.stats.station + ' ' +
                              tr.stats.channel + ' at time: ' +
                              str(tr.stats.starttime))
                continue
            if len(tr.data) <= 10:
                # Should remove the P and S picks if len(tr.data)==0
                warnings.warn('No data found for: ' + tr.stats.station)
                # print 'No data in miniseed file for '+tr.stats.station+\
                #       ' removing picks'
                # picks_out=[picks_out[i] for i in range(len(picks_out))\
                #           if i not in sta_picks]
                break
            # Get the amplitude
            amplitude, period, delay = _max_p2t(tr.data, tr.stats.delta)
            if amplitude == 0.0:
                break
            print('Amplitude picked: ' + str(amplitude))
            # Note, amplitude should be in meters at the moment!
            # Remove the pre-filter response
            if pre_filt:
                # Generate poles and zeros for the filter we used earlier:
                # this is how the filter is designed in the convenience
                # methods of filtering in obspy.
                z, p, k = iirfilter(
                    corners,
                    [lowcut / (0.5 * tr.stats.sampling_rate),
                     highcut / (0.5 * tr.stats.sampling_rate)],
                    btype='band', ftype='butter', output='zpk')
                filt_paz = {'poles': list(p),
                            'zeros': list(z),
                            'gain': k,
                            'sensitivity': 1.0}
                # Divide out the filter's gain at the picked frequency.
                amplitude /= (
                    paz_2_amplitude_value_of_freq_resp(filt_paz, 1 / period) *
                    filt_paz['sensitivity'])
            # Convert amplitude to mm
            if PAZ:
                # Divide by Gain to get to nm (returns pm? 10^-12)
                # amplitude *=PAZ['gain']
                amplitude /= 1000
            if seedresp:  # Seedresp method returns mm
                amplitude *= 1000000
            # Write out the half amplitude, approximately the peak amplitude
            # as used directly in magnitude calculations
            # Page 343 of Seisan manual:
            # Amplitude (Zero-Peak) in units of nm, nm/s, nm/s^2 or counts
            amplitude *= 0.5
            # Append an amplitude reading to the event
            _waveform_id = WaveformStreamID(station_code=tr.stats.station,
                                            channel_code=tr.stats.channel,
                                            network_code=tr.stats.network)
            pick_ind = len(event.picks)
            event.picks.append(
                Pick(waveform_id=_waveform_id, phase_hint='IAML',
                     polarity='undecidable',
                     time=tr.stats.starttime + delay,
                     evaluation_mode='automatic'))
            event.amplitudes.append(
                Amplitude(generic_amplitude=amplitude / 10**9,
                          period=period,
                          pick_id=event.picks[pick_ind].resource_id,
                          waveform_id=event.picks[pick_ind].waveform_id,
                          unit='m', magnitude_hint='ML', type='AML',
                          category='point'))
    return event