def test_id_property(self):
    """
    Check that the ``id`` property of WaveformStreamID returns exactly
    what ``get_seed_string`` returns.
    """
    wid = WaveformStreamID(seed_string="BW.FUR.01.EHZ")
    self.assertEqual(wid.id, wid.get_seed_string())
def assign_stefan_picks(cat, name_map, pk_file, uncert_cutoff):
    """
    Take output from Stefans Spicker and add to catalog

    :param cat: obspy Catalog whose events receive the picks (modified
        in place)
    :param name_map: name mapping passed through to ``make_pk_dict``
    :param pk_file: path to the Spicker pick file
    :param uncert_cutoff: picks with an error >= this value are discarded
    :return: the same catalog, with S picks appended
    """
    boreholes = ['NS12', 'NS13', 'NS14', 'THQ2']  # For channel naming
    alph = make_alph()
    picks = make_pk_dict(name_map, pk_file)
    for ev in cat:
        print('For ev: %s' % str(ev.resource_id))
        if ev.resource_id not in picks:
            continue
        # FIX: loop variable renamed from 'pk' — the original shadowed it
        # with the newly created Pick object inside the loop body.
        for pk_info in picks[ev.resource_id]:
            # (Sigh) Build the datetime from the time string...
            o_time = ev.preferred_origin().time
            # Parse H:M:S(.ms) once instead of re-splitting per field
            hour_s, minute_s, sec_s = pk_info['time'].split(':')
            sec_parts = sec_s.split('.')
            sta_nm = '{}{}{}'.format(pk_info['sta'][:2],
                                     str(alph[pk_info['sta'][2]]),
                                     str(alph[pk_info['sta'][3]]))
            # Borehole stations use the '1' channel, others the 'E' channel
            chan_nm = 'EH1' if sta_nm in boreholes else 'EHE'
            if len(sec_parts) == 1:
                microsecond = 0
            else:
                # Fractional part of the seconds field is in milliseconds
                microsecond = int(sec_parts[1]) * 1000
            pk_time = UTCDateTime(year=o_time.year, month=o_time.month,
                                  day=o_time.day, hour=int(hour_s),
                                  minute=int(minute_s),
                                  second=int(sec_parts[0]),
                                  microsecond=microsecond)
            if pk_info['sta'][0] == 'N' or pk_info['sta'][0] == 'R':
                wv_id = WaveformStreamID(station_code=sta_nm,
                                         channel_code=chan_nm)
            else:
                wv_id = WaveformStreamID(station_code=pk_info['sta'],
                                         channel_code=chan_nm)
            # Only keep picks below the uncertainty cutoff
            if float(pk_info['error']) < uncert_cutoff:
                uncert = QuantityError(uncertainty=float(pk_info['error']))
                new_pick = Pick(time=pk_time, waveform_id=wv_id,
                                phase_hint='S', time_errors=uncert)
                ev.picks.append(new_pick)
    return cat
def test_seishub(self):
    """Test the seishub method, use obspy default seishub client."""
    import sys
    if sys.version_info.major == 2:
        from future.backports.urllib.request import URLError
    else:
        from urllib.request import URLError
    t = UTCDateTime(2009, 9, 3)
    # Build a one-event catalog with three picks on station BW.MANZ
    event = Event()
    origin = Origin()
    origin.time = t
    origin.latitude = 45
    origin.longitude = 45
    origin.depth = 5000
    event.origins.append(origin)
    for channel, phase, offset in (('EHZ', 'PG', 2000),
                                   ('EHN', 'SG', 2005),
                                   ('EHE', 'SG', 2005.5)):
        event.picks.append(Pick(
            waveform_id=WaveformStreamID(station_code='MANZ',
                                         channel_code=channel,
                                         network_code='BW'),
            phase_hint=phase, time=t + offset))
    test_cat = Catalog()
    test_cat.append(event)
    test_url = "http://teide.geophysik.uni-muenchen.de:8080"
    # Only attempt the download on Python 3; a timeout is tolerated.
    if sys.version_info.major == 3:
        try:
            template = template_gen(
                method="from_seishub", catalog=test_cat, url=test_url,
                lowcut=1.0, highcut=5.0, samp_rate=20, filt_order=4,
                length=3, prepick=0.5, swin='all', process_len=300)
        except URLError:
            pass
    # Assert only if the server actually responded
    if 'template' in locals():
        self.assertEqual(len(template), 3)
def read_pick(line):
    """
    Convert REST pick string to ObsPy Pick object

    :param line: string containing pick information
    :type line: str

    :returns:
        :class:`obspy.core.event.Pick` and
        :class:`obspy.core.event.origin.Arrival`
    """
    # Fixed-width record: the line cannot simply be split on whitespace.
    splits = [0, 6, 10, 15, 18, 22, 28, 29, 41, 49, -1]
    fields = [line[start:stop].strip()
              for start, stop in zip(splits[:-1], splits[1:])]
    pick_time = UTCDateTime(
        year=int(fields[1]), julday=int(fields[2]), hour=int(fields[3]),
        minute=int(fields[4])) + float(fields[5])
    pick = Pick(time=pick_time, phase_hint=fields[7],
                evaluation_mode="automatic",
                method_id=ResourceIdentifier("smi:local/REST"),
                waveform_id=WaveformStreamID(station_code=fields[0]),
                time_errors=QuantityError(uncertainty=float(fields[8])))
    arrival = Arrival(pick_id=pick.resource_id,
                      time_residual=float(fields[9]))
    return pick, arrival
def make_pick(self, pick_time, wlen_search, stepsize, snr_wlens,
              phase_hint=None):
    """
    Re-pick around *pick_time* by maximising SNR and return an obspy Pick.

    :param pick_time: initial pick time (converted to a sample index)
    :param wlen_search: search window length in seconds
    :param stepsize: search step in seconds
    :param snr_wlens: SNR window lengths in seconds (array-like)
    :param phase_hint: optional phase label for the resulting pick
    :return: automatic, preliminary Pick at the refined time
    """
    ipick = self.time_to_index(pick_time)
    sr = self.stats.sampling_rate
    # Convert all second-based windows to sample counts
    stepsize_samp = int(stepsize * sr)
    snr_wlens_samp = (snr_wlens * sr).astype(int)
    wlen_search_samp = int(wlen_search * sr)
    new_pick, snr = tools.repick_using_snr(self.data, ipick,
                                           wlen_search_samp, stepsize_samp,
                                           snr_wlens_samp)
    waveform_id = WaveformStreamID(channel_code=self.stats.channel,
                                   station_code=self.stats.station)
    # BUG FIX: the original referenced the undefined name 'newpick'
    # (NameError at runtime); the refined sample index is 'new_pick'.
    pick = Pick(time=self.index_to_time(new_pick),
                waveform_id=waveform_id,
                phase_hint=phase_hint,
                evaluation_mode='automatic',
                evaluation_status='preliminary',
                method='snr', snr=snr)
    return pick
def _block2event(block, seed_map, id_default, ph2comp, eventid_map):
    """Read HypoDD event block"""
    # Header line layout: yr mo dy hr mn sec lat lon depth mag
    # horiz-err vert-err rms event-id
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = lines[0].split()
    # Restore the original event id if it was remapped when writing
    if eventid_map is not None and id_ in eventid_map:
        id_ = eventid_map[id_]
    time = UTCDateTime(int(yr), int(mo), int(dy), int(hr), int(mn), float(sc),
                       strict=False)
    # A value of exactly 0 means "unknown" for the error/rms fields -> None
    laterr = None if float(eh) == 0 else float(eh) / DEG2KM
    # Longitude error scales with cos(lat); undefined close to the poles
    lonerr = (None if laterr is None or float(la) > 89
              else laterr / cos(deg2rad(float(la))))
    ez = None if float(ez) == 0 else float(ez) * 1000  # km -> m
    rms = None if float(rms) == 0 else float(rms)
    picks = []
    arrivals = []
    # Phase lines: station relative-time weight phase
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id, phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase, pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    qu = OriginQuality(associated_phase_count=len(picks), standard_error=rms)
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    latitude_errors=laterr,
                    longitude_errors=lonerr,
                    depth_errors=ez,
                    time=time)
    # 'NaN' magnitude marks an event without a magnitude estimate
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg,
                              resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
def __init__(self, seed_string=None, phase_hint=None, *args, **kwargs):
    """
    Build a Pick, optionally seeding waveform_id and phase_hint.

    :param seed_string: SEED id used to build the waveform_id (if truthy)
    :param phase_hint: phase label to attach (if truthy)
    """
    super(Pick, self).__init__()
    if seed_string:
        self.waveform_id = WaveformStreamID(seed_string=seed_string)
    if phase_hint:
        self.phase_hint = phase_hint
    # Assign a fresh resource identifier
    self.newID()
def test_stationmagnitude(self):
    """
    Tests StationMagnitude object.
    """
    filename = os.path.join(self.path, 'quakeml_1.2_stationmagnitude.xml')
    catalog = readQuakeML(filename)
    # FIX: use assertEqual throughout — assertEquals is a deprecated alias.
    self.assertEqual(len(catalog), 1)
    self.assertEqual(len(catalog[0].station_magnitudes), 1)
    mag = catalog[0].station_magnitudes[0]
    # Assert the actual StationMagnitude object. Everything that is not set
    # in the QuakeML file should be set to None.
    self.assertEqual(
        mag.resource_id,
        ResourceIdentifier("smi:ch.ethz.sed/magnitude/station/881342"))
    self.assertEqual(mag.origin_id,
                     ResourceIdentifier('smi:some/example/id'))
    self.assertEqual(mag.mag, 6.5)
    self.assertEqual(mag.mag_errors.uncertainty, 0.2)
    self.assertEqual(mag.station_magnitude_type, 'MS')
    self.assertEqual(
        mag.amplitude_id,
        ResourceIdentifier("smi:ch.ethz.sed/amplitude/824315"))
    self.assertEqual(mag.method_id, ResourceIdentifier(
        "smi:ch.ethz.sed/magnitude/generic/surface_wave_magnitude"))
    self.assertEqual(
        mag.waveform_id,
        WaveformStreamID(network_code='BW',
                         station_code='FUR',
                         resource_uri="smi:ch.ethz.sed/waveform/201754"))
    self.assertEqual(mag.creation_info, None)
    # exporting back to XML should result in the same document
    # FIX: close the file handle deterministically (was left open).
    with open(filename, "rt") as fh:
        original = fh.read()
    processed = Pickler().dumps(catalog)
    self._compareStrings(original, processed)
def add_picks(tr, method, prev_picks, pick_tol=0.025):
    """Pick arrivals on *tr* and merge them into an existing pick list.

    :param tr: obspy Trace to pick
    :param method: picker name; also stored as the Pick's method_id
    :param prev_picks: list of existing Pick objects (modified in place),
        or a falsy value when no picks exist yet
    :param pick_tol: tolerance in seconds within which a new pick is
        considered a duplicate of an existing one
    :return: the merged pick list
    """
    wav_id = WaveformStreamID(station_code=tr.stats.station,
                              channel_code=tr.stats.channel,
                              network_code=tr.stats.network)
    # get_picks returns parallel sequences per picked arrival
    scnl, tpicks, polarity, snr, uncert = get_picks(tr, picker=method,
                                                    show_plot=False)
    for ind, tpick in enumerate(tpicks):
        p = Pick(time=tpick, waveform_id=wav_id,
                 time_errors=QuantityError(uncertainty=uncert[ind]),
                 method_id=method,
                 comments=[Comment(text="SNR = %f" % snr[ind])])
        # Check if there is a pick within pick tolerance threshold
        if prev_picks:
            prev_tpick = [pick.time for pick in prev_picks]
            if np.abs(np.array(prev_tpick) - p.time).min() < pick_tol:
                ix = np.abs(np.array(prev_tpick) - p.time).argmin()
                if prev_picks[ix].time < p.time:
                    # Within pick_tol of a previous, earlier pick:
                    # keep the previous pick.
                    continue  # Don't add pick
                else:
                    # Within pick_tol of a previous, later pick:
                    # replace it with this new pick.
                    prev_picks.remove(prev_picks[ix])
            prev_picks.append(p)
        else:
            # No previous pick: start a new list with this one.
            prev_picks = [p]
    return prev_picks
def test_seishub(self):
    """Test the seishub method, use obspy default seishub client."""
    from obspy.core.event import Catalog, Event, Origin, Pick
    from obspy.core.event import WaveformStreamID
    from obspy import UTCDateTime
    import warnings
    # future's hooks() makes the py3-style urllib import work on py2
    from future import standard_library
    with standard_library.hooks():
        from urllib.request import URLError
    t = UTCDateTime(2009, 9, 3)
    # Build a one-event catalog with three picks on station BW.MANZ
    test_cat = Catalog()
    test_cat.append(Event())
    test_cat[0].origins.append(Origin())
    test_cat[0].origins[0].time = t
    test_cat[0].origins[0].latitude = 45
    test_cat[0].origins[0].longitude = 45
    test_cat[0].origins[0].depth = 5000
    test_cat[0].\
        picks.append(Pick(waveform_id=WaveformStreamID(
            station_code='MANZ', channel_code='EHZ', network_code='BW'),
            phase_hint='PG', time=t + 2000))
    test_cat[0].\
        picks.append(Pick(waveform_id=WaveformStreamID(
            station_code='MANZ', channel_code='EHN', network_code='BW'),
            phase_hint='SG', time=t + 2005))
    test_cat[0].\
        picks.append(Pick(waveform_id=WaveformStreamID(
            station_code='MANZ', channel_code='EHE', network_code='BW'),
            phase_hint='SG', time=t + 2005.5))
    test_url = 'http://teide.geophysik.uni-muenchen.de:8080'
    # Network access: tolerate a timeout; only assert if it succeeded.
    try:
        template = from_seishub(test_cat, url=test_url, lowcut=1.0,
                                highcut=5.0, samp_rate=20, filt_order=4,
                                length=3, prepick=0.5, swin='all',
                                process_len=300)
    except URLError:
        warnings.warn('Timed out connection to seishub')
    if 'template' in locals():
        self.assertEqual(len(template), 3)
def __init__(self, seed_string=None, phase_hint=None, *args, **kwargs):
    """
    Build a Pick, optionally seeding waveform_id and phase_hint, and
    stamp it with creation info.

    :param seed_string: SEED id used to build the waveform_id (if truthy)
    :param phase_hint: phase label to attach (if truthy)
    """
    super(Pick, self).__init__()
    if seed_string:
        self.waveform_id = WaveformStreamID(seed_string=seed_string)
    if phase_hint:
        self.phase_hint = phase_hint
    # Assign a fresh resource identifier
    self.newID()
    # Name-mangled call to the mixin's private creation-info setter
    self._CommonEventHelper__set_creation_info()
def test_initialization_with_invalid_seed_string(self):
    """
    Test initialization with an invalid seed string. Should raise a
    warning.
    """
    with warnings.catch_warnings(record=True):
        # Escalate the warning: construction must raise a UserWarning.
        warnings.simplefilter('error', UserWarning)
        with pytest.raises(UserWarning):
            WaveformStreamID(seed_string="Invalid SEED string")
        # With the warning suppressed, the object falls back to defaults.
        warnings.simplefilter('ignore', UserWarning)
        waveform_id = WaveformStreamID(seed_string="Invalid Seed String")
        for attr in ('network_code', 'station_code', 'location_code',
                     'channel_code'):
            assert getattr(waveform_id, attr) is None
def retrieve_usgs_catalog(**kwargs):
    """
    Wrapper on obspy.clients.fdsn.Client and libcomcat (usgs) to retrieve a
    full catalog, including phase picks (that otherwise are not supported
    by the usgs fdsn implementation)

    :param kwargs: Will be passed to the Client (e.g. minlongitude,
        maxmagnitude etc...)
    :return: obspy.core.events.Catalog
    """
    cli = Client('https://earthquake.usgs.gov')
    cat = cli.get_events(**kwargs)
    # Now loop over each event and grab the phase dataframe using libcomcat
    for ev in cat:
        print(ev.resource_id.id)
        # Extract the USGS eventid from the resource_id query string
        # (assumes the id has the form '...eventid=<eid>&...' — TODO confirm)
        eid = ev.resource_id.id.split('=')[-2].split('&')[0]
        detail = get_event_by_id(eid, includesuperseded=True)
        phase_df = get_phase_dataframe(detail)
        o = ev.preferred_origin()
        for i, phase_info in phase_df.iterrows():
            # 'Channel' appears to be net.sta.chan.loc — verify upstream
            seed_id = phase_info['Channel'].split('.')
            loc = seed_id[-1]
            if loc == '--':  # placeholder for an empty location code
                loc = ''
            wf_id = WaveformStreamID(network_code=seed_id[0],
                                     station_code=seed_id[1],
                                     location_code=loc,
                                     channel_code=seed_id[2])
            pk = Pick(time=UTCDateTime(phase_info['Arrival Time']),
                      method=phase_info['Status'], waveform_id=wf_id,
                      phase_hint=phase_info['Phase'])
            ev.picks.append(pk)
            arr = Arrival(pick_id=pk.resource_id.id, phase=pk.phase_hint,
                          azimuth=phase_info['Azimuth'],
                          distance=phase_info['Distance'],
                          time_residual=phase_info['Residual'],
                          time_weight=phase_info['Weight'])
            o.arrivals.append(arr)
        # Try to read focal mechanisms/moment tensors
        if 'moment-tensor' in detail.products:
            # Always take MT where available
            mt_xml = detail.getProducts('moment-tensor')[0].getContentBytes(
                'quakeml.xml')[0]
        elif 'focal-mechanism' in detail.products:
            mt_xml = detail.getProducts('focal-mechanism')[0].getContentBytes(
                'quakeml.xml')[0]
        else:
            continue
        # Parse the product QuakeML bytes and attach its focal mechanism
        mt_ev = read_events(
            io.TextIOWrapper(io.BytesIO(mt_xml), encoding='utf-8'))
        FM = mt_ev[0].focal_mechanisms[0]
        FM.triggering_origin_id = ev.preferred_origin().resource_id.id
        ev.focal_mechanisms = [FM]
    return cat
def __init__(self, seed_string=None, *args, **kwargs):
    """
    Build an Amplitude, optionally seeding waveform_id from a SEED id,
    and initialise the amplitude-window bookkeeping attributes.

    :param seed_string: SEED id used to build the waveform_id (if truthy)
    """
    super(Amplitude, self).__init__()
    if seed_string:
        self.waveform_id = WaveformStreamID(seed_string=seed_string)
    # Assign a fresh resource identifier
    self.newID()
    # Extrema of the measured amplitude window, unset until measured
    for attr in ('low', 'high', 'low_time', 'high_time'):
        setattr(self, attr, None)
    self.time_window = TimeWindow()
def __toStationMagnitude(parser, stat_mag_el):
    """
    Parses a given station magnitude etree element.

    :type parser: :class:`~obspy.core.util.xmlwrapper.XMLParser`
    :param parser: Open XMLParser object.
    :type stat_mag_el: etree.element
    :param stat_mag_el: station magnitude element to be parsed.
    :return: A ObsPy :class:`~obspy.core.event.StationMagnitude` object.
    """
    global CURRENT_TYPE
    mag = StationMagnitude()
    mag.mag, mag.mag_errors = __toFloatQuantity(parser, stat_mag_el, "mag")
    mag.resource_id = ResourceIdentifier(
        prefix="/".join([RESOURCE_ROOT, "station_magnitude"]))
    # Use the waveform id to store station and channel(s) in the form
    # station.[channel_1, channel_2] or station.channel in the case only one
    # channel has been used.
    # XXX: This might be a violation of how this field is used within QuakeML
    channels = parser.xpath2obj('channels', stat_mag_el).split(',')
    channels = ','.join([_i.strip() for _i in channels])
    # NOTE(review): 'channels' is a string at this point, so the length test
    # is almost always true and the '%s' formatting is a no-op — confirm.
    if len(channels) > 1:
        channels = '%s' % channels
    station = fix_station_name(parser.xpath2obj('station', stat_mag_el))
    location = parser.xpath2obj('location', stat_mag_el, str) or ""
    mag.waveform_id = WaveformStreamID()
    # Map some station names.
    if station in STATION_DICT:
        station = STATION_DICT[station]
    mag.waveform_id.station_code = station
    if CURRENT_TYPE == "obspyck":
        mag.method_id = "%s/station_magnitude_method/obspyck/1" % RESOURCE_ROOT
    network = parser.xpath2obj('network', stat_mag_el)
    if network is None:
        # network id is not stored in original stationMagnitude, try to find
        # it in a pick with same station name
        for waveform in parser.xpath("pick/waveform"):
            if waveform.attrib.get("stationCode") == station:
                network = waveform.attrib.get("networkCode")
                break
    if network is None:
        network = NETWORK_DICT[station]
    if network is None:
        # Legacy Python 2 print statement; this module predates Python 3.
        print "AAAAAAAAAAAAAAAAAAAAAAAAAAHHHHHHHHHHHHHHHHHHH"
        raise Exception
    # Only a single channel can be stored in channel_code
    if "," not in channels:
        mag.waveform_id.channel_code = channels
    mag.waveform_id.network_code = network
    mag.waveform_id.location_code = location
    return mag
def get_default_catalog(self):
    """Get a catalog with picks from the default stream."""
    pick = Pick(
        time=UTCDateTime(2009, 8, 24, 0, 20, 7, 696381),
        waveform_id=WaveformStreamID(seed_string='BW.RJOB..EHZ'),
        phase_hint='P')
    origin = Origin(time=UTCDateTime(2009, 8, 24, 0, 20, 6, 410034),
                    longitude=0, latitude=0, depth=0)
    return Catalog(events=[Event(picks=[pick], origins=[origin])])
def _block2event(block, seed_map, id_default, ph2comp):
    """Read HypoDD event block"""
    # Header line layout: yr mo dy hr mn sec lat lon depth mag
    # horiz-err vert-err rms event-id
    lines = block.strip().splitlines()
    yr, mo, dy, hr, mn, sc, la, lo, dp, mg, eh, ez, rms, id_ = lines[0].split()
    time = UTCDateTime(int(yr), int(mo), int(dy), int(hr), int(mn), float(sc),
                       strict=False)
    picks = []
    arrivals = []
    # Phase lines: station relative-time weight phase
    for line in lines[1:]:
        sta, reltime, weight, phase = line.split()
        comp = ph2comp.get(phase, '')
        wid = seed_map.get(sta, id_default)
        _waveform_id = WaveformStreamID(seed_string=wid.format(sta, comp))
        pick = Pick(waveform_id=_waveform_id, phase_hint=phase,
                    time=time + float(reltime))
        arrival = Arrival(phase=phase, pick_id=pick.resource_id,
                          time_weight=float(weight))
        picks.append(pick)
        arrivals.append(arrival)
    # An rms of exactly '0.0' means "unknown" -> no quality information
    qu = None if rms == '0.0' else OriginQuality(standard_error=float(rms))
    origin = Origin(arrivals=arrivals,
                    resource_id="smi:local/origin/" + id_,
                    quality=qu,
                    latitude=float(la),
                    longitude=float(lo),
                    depth=1000 * float(dp),
                    time=time)
    # 'NaN' magnitude marks an event without a magnitude estimate
    if mg.lower() == 'nan':
        magnitudes = []
        preferred_magnitude_id = None
    else:
        magnitude = Magnitude(mag=mg,
                              resource_id="smi:local/magnitude/" + id_)
        magnitudes = [magnitude]
        preferred_magnitude_id = magnitude.resource_id
    event = Event(resource_id="smi:local/event/" + id_,
                  picks=picks,
                  origins=[origin],
                  magnitudes=magnitudes,
                  preferred_origin_id=origin.resource_id,
                  preferred_magnitude_id=preferred_magnitude_id)
    return event
def test_initialization(self):
    """
    Test the different initialization methods.
    """
    # Default construction leaves every code unset.
    waveform_id = WaveformStreamID()
    for attr in ('network_code', 'station_code', 'location_code',
                 'channel_code'):
        self.assertEqual(getattr(waveform_id, attr), None)
    # A SEED string populates all four codes.
    waveform_id = WaveformStreamID(seed_string="BW.FUR.01.EHZ")
    self.assertEqual(waveform_id.network_code, "BW")
    self.assertEqual(waveform_id.station_code, "FUR")
    self.assertEqual(waveform_id.location_code, "01")
    self.assertEqual(waveform_id.channel_code, "EHZ")
    # As soon as any other argument is set, the seed_string will not be
    # used and the default values will be used for any unset arguments.
    waveform_id = WaveformStreamID(location_code="02",
                                   seed_string="BW.FUR.01.EHZ")
    self.assertEqual(waveform_id.network_code, None)
    self.assertEqual(waveform_id.station_code, None)
    self.assertEqual(waveform_id.location_code, "02")
    self.assertEqual(waveform_id.channel_code, None)
def test_no_suitable_picks_event1(self):
    """With only S picks, no relative amplitudes should be computed."""
    scale_factor = 0.2
    st1 = read()
    st1.filter("bandpass", freqmin=2, freqmax=20)
    st2 = st1.copy()
    picks = [Pick(time=tr.stats.starttime + 5, phase_hint="S",
                  waveform_id=WaveformStreamID(seed_string=tr.id))
             for tr in st1]
    event1 = Event(picks=picks)
    event2 = event1
    for tr in st2:
        tr.data *= scale_factor
    relative_amplitudes = relative_amplitude(
        st1=st1, st2=st2, event1=event1, event2=event2)
    self.assertEqual(len(relative_amplitudes), 0)
def test_write_pha_minimal(self):
    """Round-trip a minimal catalog through the HYPODDPHA writer."""
    ori = Origin(time=UTC(0), latitude=42, longitude=43, depth=10000)
    pick = Pick(time=UTC(10), phase_hint='S',
                waveform_id=WaveformStreamID(station_code='STA'))
    # Strip the error attributes so the writer sees a truly minimal origin
    for errors_attr in ('latitude_errors', 'longitude_errors',
                        'depth_errors'):
        delattr(ori, errors_attr)
    cat = Catalog([Event(origins=[ori], picks=[pick])])
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        # The event has no magnitude, so the writer must warn
        with self.assertWarnsRegex(UserWarning, 'Missing mag'):
            cat.write(tempfile, 'HYPODDPHA')
        cat2 = read_events(tempfile)
        self.assertEqual(len(cat2), 1)
        self.assertEqual(len(cat2[0].picks), 1)
def _phase_to_event(event_text):
    """
    Function to convert the text for one event in hypoDD phase format to \
    event object.

    :type event_text: dict
    :param event_text: dict of two elements, header and picks, header is a \
        str, picks is a list of str.

    :returns: obspy.core.event.Event
    """
    from obspy.core.event import Event, Origin, Magnitude
    from obspy.core.event import Pick, WaveformStreamID, Arrival
    from obspy import UTCDateTime
    ph_event = Event()
    # Extract info from header line
    # YR, MO, DY, HR, MN, SC, LAT, LON, DEP, MAG, EH, EZ, RMS, ID
    header = event_text['header'].split()
    ph_event.origins.append(Origin())
    ph_event.origins[0].time = UTCDateTime(
        year=int(header[1]), month=int(header[2]), day=int(header[3]),
        hour=int(header[4]), minute=int(header[5]),
        second=int(header[6].split('.')[0]),
        microsecond=int(float(('0.' + header[6].split('.')[1])) * 1000000))
    ph_event.origins[0].latitude = float(header[7])
    ph_event.origins[0].longitude = float(header[8])
    ph_event.origins[0].depth = float(header[9]) * 1000
    ph_event.origins[0].time_errors['Time_Residual_RMS'] = float(header[13])
    ph_event.magnitudes.append(Magnitude())
    ph_event.magnitudes[0].mag = float(header[10])
    ph_event.magnitudes[0].magnitude_type = 'M'
    # Extract arrival info from picks!
    for i, pick_line in enumerate(event_text['picks']):
        pick = pick_line.split()
        _waveform_id = WaveformStreamID(station_code=pick[0])
        pick_time = ph_event.origins[0].time + float(pick[1])
        ph_event.picks.append(
            Pick(waveform_id=_waveform_id, phase_hint=pick[3],
                 time=pick_time))
        # BUG FIX: Arrival.phase must be the phase name (a string); the
        # original passed the whole Pick object instead of its phase_hint.
        ph_event.origins[0].arrivals.append(
            Arrival(phase=ph_event.picks[i].phase_hint,
                    pick_id=ph_event.picks[i].resource_id))
        ph_event.origins[0].arrivals[i].time_weight = float(pick[2])
    return ph_event
def test_scaled_event(self):
    """A uniformly scaled copy must yield the scale factor per channel."""
    scale_factor = 0.2
    st1 = read()
    st1.filter("bandpass", freqmin=2, freqmax=20)
    st2 = st1.copy()
    picks = [Pick(time=tr.stats.starttime + 5, phase_hint="P",
                  waveform_id=WaveformStreamID(seed_string=tr.id))
             for tr in st1]
    event1 = Event(picks=picks)
    event2 = event1
    for tr in st2:
        tr.data *= scale_factor
    relative_amplitudes, _, _ = relative_amplitude(
        st1=st1, st2=st2, event1=event1, event2=event2)
    self.assertEqual(len(relative_amplitudes), len(st1))
    for value in relative_amplitudes.values():
        self.assertAlmostEqual(value, scale_factor)
def parse_pyrocko_markers(marker_file):
    """
    Parse picks in Pyrocko markers format

    :param marker_file: path to a Pyrocko marker file
    :return: list of :class:`obspy.core.event.Pick`
    """
    picks = []
    with open(marker_file, 'r') as f:
        for ln in f:
            line = ln.split()
            # FIX: guard against blank lines (line[0] raised IndexError);
            # also skip comment/header and event lines.
            if not line or line[0] in ('#', 'event:'):
                continue
            # Columns 1-2 hold date and time-of-day
            time = UTCDateTime('T'.join(line[1:3]))
            net, sta, loc, chan = line[4].split('.')
            picks.append(Pick(
                time=time, phase_hint=line[-3],
                waveform_id=WaveformStreamID(network_code=net,
                                             station_code=sta,
                                             location_code=loc,
                                             channel_code=chan)))
    return picks
def read_pick_line(string_line, new_event, _network_code):
    """
    Parse one fixed-width pick line and append the resulting Pick and
    Arrival to *new_event* (returned for convenience).
    """
    time_origin = new_event.origins[0].time
    _method_id = 'K'
    _evaluation_mode = 'automatic'
    # translate weight into time uncertainty
    time_error_ref = [0.5, 0.25, 0.1, 0.01]
    ### Parse the fixed-width fields
    _station_code = string_line[1:6].strip()
    tt = float(string_line[7:14])
    weight = float(string_line[14:18])
    _phase_hint = string_line[19:21].strip()
    abs_time = time_origin + tt
    _waveform_id = WaveformStreamID(network_code=_network_code,
                                    station_code=_station_code)
    ### Build the Pick
    pick = Pick(waveform_id=_waveform_id, phase_hint=_phase_hint,
                time=abs_time, method_id=_method_id,
                evaluation_mode=_evaluation_mode,
                time_errors=weight2error(weight, time_error_ref))
    ### Build the Arrival referencing the pick
    arrival = Arrival(pick_id=pick.resource_id, phase=pick.phase_hint)
    arrival.time_weight = weight
    ### Attach both to the event
    new_event.picks.append(pick)
    new_event.origins[0].arrivals.append(arrival)
    return new_event
def test_low_snr(self):
    """Noisy channels are excluded; only the clean one yields a ratio."""
    scale_factor = 0.2
    st1 = read()
    # Bury one channel of each stream in noise at its own amplitude level
    st1[0].data += np.random.randn(st1[0].stats.npts) * st1[0].data.max()
    st2 = st1.copy()
    st2[1].data += np.random.randn(st2[1].stats.npts) * st2[1].data.max()
    picks = [Pick(time=tr.stats.starttime + 5, phase_hint="P",
                  waveform_id=WaveformStreamID(seed_string=tr.id))
             for tr in st1]
    event1 = Event(picks=picks)
    event2 = event1
    for tr in st2:
        tr.data *= scale_factor
    relative_amplitudes = relative_amplitude(
        st1=st1, st2=st2, event1=event1, event2=event2)
    self.assertEqual(len(relative_amplitudes), 1)
    for value in relative_amplitudes.values():
        self.assertAlmostEqual(value, scale_factor)
def add_dummy_picks(self):
    """
    Don't call this function for a real job
    """
    import random
    for i, (_, row) in enumerate(self.all_stations.iterrows()):
        d = row.to_dict()  # pandas series to dict
        res = ResourceIdentifier('custom_pick_{}'.format(i))
        wav_id = WaveformStreamID(station_code=d[STATION_CODE],
                                  network_code=d[NETWORK_CODE],
                                  channel_code='BHN')
        # randomly choose a time from the available picks
        self.picks.append(Pick(resource_id=res, waveform_id=wav_id,
                               phase_hint='S',
                               time=random.choice(self.picks).time))
        self._pref_origin.arrivals.append(Arrival(pick_id=res, phase='S'))
        if i == 4:  # i.e., insert 5 random picks and arrivals
            break
def test_initialization_with_invalid_seed_string(self):
    """
    Test initialization with an invalid seed string. Should raise a
    warning.

    Skipped for Python 2.5 because it does not have the catch_warnings
    context manager.
    """
    with warnings.catch_warnings(record=True):
        # Escalate the warning: construction must raise a UserWarning.
        warnings.simplefilter('error', UserWarning)
        self.assertRaises(UserWarning, WaveformStreamID,
                          seed_string="Invalid SEED string")
        # With the warning suppressed, every code falls back to None.
        warnings.simplefilter('ignore', UserWarning)
        waveform_id = WaveformStreamID(seed_string="Invalid Seed String")
        for attr in ('network_code', 'station_code', 'location_code',
                     'channel_code'):
            self.assertEqual(getattr(waveform_id, attr), None)
def write_events(db, start, end):
    """
    Build an Event from the picks between *start* and *end* and return it
    together with the ARC result from ``split_event``.
    """
    picks_query, ARC = split_event(db, start, end)
    ev = Event()
    ev.event_descriptions.append(EventDescription())
    # Dummy origin at (0, 0, 0) with the window start as origin time
    ev.origins.append(Origin(time=UTCDateTime(start), latitude=0,
                             longitude=0, depth=0))
    for p in picks_query:
        wid = WaveformStreamID(station_code=p.station, channel_code='EHZ',
                               network_code='HL')
        ev.picks.append(Pick(waveform_id=wid, phase_hint='I' + p.phase,
                             time=UTCDateTime(p.time),
                             evaluation_mode="automatic"))
    return ev, ARC
def __init__(self, trace, time, name='', comments='', method=method_other,
             phase_hint=None, polarity='undecidable', aic=None, n0_aic=None,
             *args, **kwargs):
    """Create an event tied to *trace* at sample position *time*.

    :param trace: parent trace; must expose ``signal`` and ``stats``
    :param time: event position as a sample index into ``trace.signal``
        (0 <= time < len(trace.signal))
    :param name: optional event name
    :param comments: free-text comments stored on the instance
    :param method: detection method identifier (defaults to method_other)
    :param phase_hint: phase label; falls back to PHASE_VALUES[0] if not
        one of the allowed PHASE_VALUES
    :param polarity: pick polarity (default 'undecidable')
    :param aic: optional AIC curve associated with the pick
    :param n0_aic: optional AIC onset index
    :raises ValueError: if *time* lies outside the trace signal
    """
    self.trace = trace
    # 'time' is a sample index, not a timestamp
    if time < 0 or time >= len(self.trace.signal):
        raise ValueError("Event position must be a value between 0 and %d"
                         % len(self.trace.signal))
    self.stime = time
    self.name = name
    self.method = method
    self.aic = aic
    self.n0_aic = n0_aic
    # Fall back to the first allowed phase value for unknown hints
    phase_hint = phase_hint if phase_hint in PHASE_VALUES else PHASE_VALUES[0]
    # NOTE(review): self.time (used below) is presumably a property that
    # converts self.stime to an absolute time — confirm in the class body.
    super(ApasvoEvent, self).__init__(time=self.time,
                                      method_id=ResourceIdentifier(method),
                                      phase_hint=phase_hint,
                                      polarity=polarity,
                                      creation_info=CreationInfo(
                                          author=kwargs.get('author', ''),
                                          agency_id=kwargs.get('agency', ''),
                                          creation_time=UTCDateTime.now(),
                                      ),
                                      waveform_id=WaveformStreamID(
                                          network_code=self.trace.stats.get('network', ''),
                                          station_code=self.trace.stats.get('station', ''),
                                          location_code=self.trace.stats.get('location', ''),
                                          channel_code=self.trace.stats.get('channel', ''),
                                      ),
                                      *args,
                                      **kwargs)
    self.comments = comments
def parse_picks(pick_file):
    """
    Helper for parsing file with pick information

    :param pick_file: Path to the file
    :return: Dictionary of pick info: {eid: {sta.chan: list of picks}}
    """
    pick_dict = {}
    with open(pick_file, 'r') as f:
        next(f)  # skip the header row
        for raw in f:
            line = raw.strip('\n').split(',')
            eid = line[0]
            time = UTCDateTime(line[-3])
            phase = line[-2]
            # Placing these per standard convention but not true!!
            # TODO Maybe these should all be Z? Careful when doing correlations
            if line[2] not in three_comps:
                chan = 'XN1'  # Hydrophone channels
            elif phase == 'P':
                chan = 'XNZ'
            else:
                chan = 'XNX'
            # An SNR of zero marks a manual pick
            method = 'manual' if float(line[-1]) == 0. else 'phasepapy'
            wf_id = WaveformStreamID(network_code='SV',
                                     station_code=line[2],
                                     location_code='',
                                     channel_code=chan)
            pk = Pick(time=time, method=method, waveform_id=wf_id,
                      phase_hint=phase)
            pick_dict.setdefault(eid, []).append(pk)
    return pick_dict
def _parse_record_p(self, line, event):
    """
    Parses the 'primary phase record' P

    The primary phase is the first phase of the reading, regardless its
    type.
    """
    # Fixed-width fields of the P record
    station = line[2:7].strip()
    phase = line[7:15]
    arrival_time = line[15:24]
    residual = self._float(line[25:30])
    # unused: residual_flag = line[30]
    distance = self._float(line[32:38])  # degrees
    azimuth = self._float(line[39:44])
    # NOTE(review): _float may return None for a blank field, in which case
    # this arithmetic would raise TypeError — confirm azimuth is mandatory.
    backazimuth = round(azimuth % -360 + 180, 1)
    mb_period = self._float(line[44:48])
    mb_amplitude = self._float(line[48:55])  # nanometers
    mb_magnitude = self._float(line[56:59])
    # unused: mb_usage_flag = line[59]
    origin = event.origins[0]
    evid = event.resource_id.id.split('/')[-1]
    waveform_id = WaveformStreamID()
    waveform_id.station_code = station
    # network_code is required for QuakeML validation
    waveform_id.network_code = ' '
    station_string = \
        waveform_id.get_seed_string()\
        .replace(' ', '-').replace('.', '_').lower()
    prefix = '/'.join((res_id_prefix, 'waveformstream',
                       evid, station_string))
    waveform_id.resource_uri = ResourceIdentifier(prefix=prefix)
    pick = Pick()
    prefix = '/'.join((res_id_prefix, 'pick', evid, station_string))
    pick.resource_id = ResourceIdentifier(prefix=prefix)
    date = origin.time.strftime('%Y%m%d')
    pick.time = UTCDateTime(date + arrival_time)
    # Check if pick is on the next day:
    if pick.time < origin.time:
        pick.time += timedelta(days=1)
    pick.waveform_id = waveform_id
    pick.backazimuth = backazimuth
    # Leading onset-quality character precedes the phase name
    onset = phase[0]
    if onset == 'e':
        pick.onset = 'emergent'
        phase = phase[1:]
    elif onset == 'i':
        pick.onset = 'impulsive'
        phase = phase[1:]
    elif onset == 'q':
        pick.onset = 'questionable'
        phase = phase[1:]
    pick.phase_hint = phase.strip()
    event.picks.append(pick)
    if mb_amplitude is not None:
        # Body-wave amplitude present: build Amplitude and StationMagnitude
        amplitude = Amplitude()
        prefix = '/'.join((res_id_prefix, 'amp', evid, station_string))
        amplitude.resource_id = ResourceIdentifier(prefix=prefix)
        amplitude.generic_amplitude = mb_amplitude * 1E-9  # nm -> m
        amplitude.unit = 'm'
        amplitude.period = mb_period
        amplitude.type = 'AB'
        amplitude.magnitude_hint = 'Mb'
        amplitude.pick_id = pick.resource_id
        amplitude.waveform_id = pick.waveform_id
        event.amplitudes.append(amplitude)
        station_magnitude = StationMagnitude()
        # NOTE(review): 'stationmagntiude' is misspelled in this resource id;
        # left as-is because existing ids may depend on it.
        prefix = '/'.join((res_id_prefix, 'stationmagntiude',
                           evid, station_string))
        station_magnitude.resource_id = ResourceIdentifier(prefix=prefix)
        station_magnitude.origin_id = origin.resource_id
        station_magnitude.mag = mb_magnitude
        # station_magnitude.mag_errors['uncertainty'] = 0.0
        station_magnitude.station_magnitude_type = 'Mb'
        station_magnitude.amplitude_id = amplitude.resource_id
        station_magnitude.waveform_id = pick.waveform_id
        res_id = '/'.join(
            (res_id_prefix, 'magnitude/generic/body_wave_magnitude'))
        station_magnitude.method_id = \
            ResourceIdentifier(id=res_id)
        event.station_magnitudes.append(station_magnitude)
    arrival = Arrival()
    prefix = '/'.join((res_id_prefix, 'arrival', evid, station_string))
    arrival.resource_id = ResourceIdentifier(prefix=prefix)
    arrival.pick_id = pick.resource_id
    arrival.phase = pick.phase_hint
    arrival.azimuth = azimuth
    arrival.distance = distance
    arrival.time_residual = residual
    res_id = '/'.join((res_id_prefix, 'earthmodel/ak135'))
    arrival.earth_model_id = ResourceIdentifier(id=res_id)
    origin.arrivals.append(arrival)
    origin.quality.minimum_distance = min(
        d for d in (arrival.distance, origin.quality.minimum_distance)
        if d is not None)
    # NOTE(review): this compares against minimum_distance, not
    # maximum_distance — looks like a copy/paste slip; confirm intent.
    origin.quality.maximum_distance = \
        max(arrival.distance, origin.quality.minimum_distance)
    origin.quality.associated_phase_count += 1
    return pick, arrival
def _read_evt(filename, inventory=None, id_map=None, id_default='.{}..{}',
              encoding='utf-8'):
    """
    Read a SeismicHandler EVT file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.read_events` function, call this
        instead.

    :type filename: str
    :param filename: File or file-like object in text mode.
    :type inventory: :class:`~obspy.core.inventory.inventory.Inventory`
    :param inventory: Inventory used to retrieve network code, location code
        and channel code of stations (SEED id).
    :type id_map: dict
    :param id_map: If channel information was not found in inventory,
        it will be looked up in this dictionary
        (example: `id_map={'MOX': 'GR.{}..HH{}'`).
    :type id_default: str
    :param id_default: Default SEED id expression.
        The value must contain three dots and two `{}` which are
        substituted by station code and component.
    :param str encoding: encoding used (default: utf-8)
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. note::
        The following fields are supported by this function: %s.

        Compare with http://www.seismic-handler.org/wiki/ShmDocFileEvt
    """
    seed_map = _seed_id_map(inventory, id_map)
    with io.open(filename, 'r', encoding=encoding) as f:
        temp = f.read()
    # first create phases and phases_o dictionaries for different phases
    # and phases with origin information
    phases = defaultdict(list)
    phases_o = {}
    phase = {}
    evid = None
    for line in temp.splitlines():
        if 'End of Phase' in line:
            # A phase block ends; keep it (and its origin info, if any)
            if 'origin time' in phase.keys():
                if evid in phases_o:
                    # found more than one origin
                    pass
                phases_o[evid] = phase
            phases[evid].append(phase)
            phase = {}
            evid = None
        elif line.strip() != '':
            # 'key : value' lines inside a phase block
            try:
                key, value = line.split(':', 1)
            except ValueError:
                continue
            key = key.strip().lower()
            value = value.strip()
            if key == 'event id':
                evid = value
            elif value != '':
                phase[key] = value
    # Every block must have been terminated by an 'End of Phase' line
    assert evid is None
    # now create obspy Events from phases and phases_o dictionaries
    events = []
    for evid in phases:
        picks = []
        arrivals = []
        stamags = []
        origins = []
        po = None
        magnitudes = []
        pm = None
        for p in phases[evid]:
            sta = p.get('station code', '')
            comp = p.get('component', '')
            wid = seed_map.get(sta, id_default)
            wid = WaveformStreamID(seed_string=wid.format(sta, comp))
            pick = Pick(waveform_id=wid, **_kw(p, 'pick'))
            arrival = Arrival(pick_id=pick.resource_id, **_kw(p, 'arrival'))
            picks.append(pick)
            arrivals.append(arrival)
            stamags_temp, _ = _mags(p, evid, stamag=True, wid=wid)
            stamags.extend(stamags_temp)
        if evid in phases_o:
            # This event has a phase carrying origin information
            o = phases_o[evid]
            uncertainty = OriginUncertainty(**_kw(o, 'origin_uncertainty'))
            origin = Origin(arrivals=arrivals,
                            origin_uncertainty=uncertainty,
                            **_kw(o, 'origin'))
            if origin.latitude is None or origin.longitude is None:
                warn('latitude or longitude not set for event %s' % evid)
            else:
                # Scale the longitude error to the local latitude circle
                if origin.longitude_errors.uncertainty is not None:
                    origin.longitude_errors.uncertainty *= cos(
                        origin.latitude / 180 * pi)
                origins = [origin]
                po = origin.resource_id
            magnitudes, pm = _mags(o, evid)
        else:
            # No origin block: fall back to the last phase dict for
            # event-level keywords
            o = p
        event = Event(resource_id=ResourceIdentifier(evid),
                      picks=picks,
                      origins=origins,
                      magnitudes=magnitudes,
                      station_magnitudes=stamags,
                      preferred_origin_id=po,
                      preferred_magnitude_id=pm,
                      **_kw(o, 'event')
                      )
        events.append(event)
    return Catalog(events,
                   description='Created from SeismicHandler EVT format')