def test_preferred_tags(self):
    """
    Test resolution of preferred origin, magnitude and focal mechanism.

    Covers both an empty Event (all preferred getters return None) and a
    known QuakeML file with explicit preferred_* ids set.
    """
    # An empty event has no preferred objects to resolve.
    empty_event = Event()
    self.assertIsNone(empty_event.preferred_origin())
    self.assertIsNone(empty_event.preferred_magnitude())
    self.assertIsNone(empty_event.preferred_focal_mechanism())
    # Read a file that carries explicit preferred ids.
    filename = os.path.join(self.path, 'preferred.xml')
    catalog = read_events(filename)
    self.assertEqual(len(catalog), 1)
    event = catalog.events[0]
    # The event summary string should reflect the preferred origin/magnitude.
    ev_str = ("Event:\t2012-12-12T05:46:24.120000Z | +38.297, +142.373 "
              "| 2.0 MW")
    self.assertIn(ev_str, str(event))
    # The stored ids point at the second of each list...
    self.assertEqual('smi:orig2', event.preferred_origin_id)
    self.assertEqual('smi:mag2', event.preferred_magnitude_id)
    self.assertEqual('smi:fm2', event.preferred_focal_mechanism_id)
    # ...and the getters resolve those ids to the actual objects.
    self.assertEqual(event.preferred_origin(), event.origins[1])
    self.assertEqual(event.preferred_magnitude(), event.magnitudes[1])
    self.assertEqual(
        event.preferred_focal_mechanism(), event.focal_mechanisms[1])
def from_event(cls, event: Event):
    """
    Build an instance from the polarity picks of an obspy Event.

    Uses the preferred origin (or the last origin) to find the arrival
    matching each P-polarity pick, and converts the arrival geometry into
    Polarity objects.

    :raises NotImplementedError: if the event has no origins at all.
    """
    try:
        # Fall back to the most recent origin when no preferred one is set.
        origin = event.preferred_origin() or event.origins[-1]
    except IndexError:
        raise NotImplementedError("Event needs an origin")
    polarities = []
    for pick in event.picks:
        # Only picks that both carry a polarity and are P-phase picks are
        # usable for first-motion work.
        if pick.polarity and pick.phase_hint.startswith("P"):
            # Get the arrival
            pick_seed_id = pick.waveform_id.get_seed_string()
            print(f"Found polarity of {pick.polarity} for {pick_seed_id}")
            for arr in origin.arrivals:
                # Match arrival to pick via the referred pick's seed id.
                arr_pick = arr.pick_id.get_referred_object()
                if arr_pick and arr_pick.waveform_id.get_seed_string(
                ) == pick_seed_id:
                    if arr.phase == "P":
                        # NOTE(review): negative takeoff angles are treated
                        # as down-going (angle negated, azimuth kept), and
                        # positive ones as up-going (azimuth flipped by
                        # 180 deg) — assumed convention of the locator;
                        # confirm against the catalogue producing these
                        # arrivals.
                        if arr.takeoff_angle < 0:
                            toa = abs(arr.takeoff_angle)
                            az = arr.azimuth % 360
                        else:
                            toa = arr.takeoff_angle
                            az = (arr.azimuth + 180) % 360
                        polarity = Polarity(az, toa, pick.polarity,
                                            station=pick_seed_id)
                        polarities.append(polarity)
                        break
            else:
                # for-else: no arrival was matched (no break taken).
                print(
                    f"No arrival found for polarity pick on {pick_seed_id}"
                )
    return cls(polarities=polarities)
def inter_event_distance(event1: Event, event2: Event) -> float:
    """
    Compute the great-circle separation between two events in degrees.

    The preferred origin of each event is used when set, otherwise the
    first origin. If either event has no origin at all, the maximum
    possible separation (180 degrees) is returned.
    """
    try:
        origins = [ev.preferred_origin() or ev.origins[0]
                   for ev in (event1, event2)]
    except IndexError:
        # At least one event carries no origin - treat as maximally distant.
        return 180.
    first, second = origins
    return locations2degrees(lat1=first.latitude,
                             long1=first.longitude,
                             lat2=second.latitude,
                             long2=second.longitude)
def event_time(event: Event) -> UTCDateTime:
    """
    Return a reference time for an event.

    Prefers the (preferred) origin time; falls back to the earliest pick
    time, and finally to the epoch when the event has neither.
    """
    try:
        return (event.preferred_origin() or event.origins[0]).time
    except IndexError:
        # No origins - fall through to the picks.
        pass
    try:
        return sorted(event.picks, key=lambda p: p.time)[0].time
    except IndexError:
        print("Neither origin nor pick found")
        return UTCDateTime(0)
def _get_plot_starttime(event: Event, st: Stream) -> UTCDateTime:
    """
    Work out when a plot of ``st`` for ``event`` should start.

    Preference order: origin time, five seconds before the earliest pick,
    then the earliest trace start-time in the stream.
    """
    try:
        timed = event.preferred_origin() or event.origins[0]
        return timed.time
    except (AttributeError, IndexError):
        pass
    try:
        # Five seconds of lead-in before the first pick.
        return min(p.time for p in event.picks) - 5
    except ValueError:
        # No picks either - use the earliest trace start.
        return min(tr.stats.starttime for tr in st)
def get_geonet_waveforms(
    event: Event,
    delay: float = 0.,
    length: float = 120.,
    all_components: bool = False,
) -> Stream:
    """
    Get picked GeoNet waveforms for an event.

    Parameters
    ----------
    event:
        The event to get waveforms for
    delay:
        Delay in seconds relative to the origin-time of the event to
        download waveforms from
    length:
        Total length in seconds for each channel to download
    all_components:
        Whether to download all components (True), or just those picked
        (False)

    Returns
    -------
    Stream downloaded.

    Raises
    ------
    NotImplementedError
        If the event has neither an origin time nor any picks to infer a
        reference time from.
    """
    # Imported here so the module does not require FDSN support at import
    # time.
    from obspy.clients.fdsn import Client
    client = Client("GEONET")
    # One seed id per picked channel.
    seed_ids = {pick.waveform_id.get_seed_string() for pick in event.picks}
    if all_components:
        # Replace the component code with a wildcard.
        seed_ids = {sid[0:-1] + "?" for sid in seed_ids}
    try:
        origin_time = (event.preferred_origin() or event.origins[0]).time
    except IndexError:
        # No origin - fall back to the earliest pick time.
        try:
            origin_time = sorted(event.picks, key=lambda p: p.time)[0].time
        except IndexError:
            raise NotImplementedError("No origin time or pick times found")
    st = Stream()
    for seed_id in seed_ids:
        print(f"Downloading for {seed_id}")
        try:
            st += client.get_waveforms(
                *seed_id.split('.'),
                starttime=origin_time + delay,
                endtime=origin_time + length + delay)
        except Exception as e:
            # Best-effort download: report and keep going with other
            # channels.
            print(f"Some error downloading {seed_id}: \n{e}")
    # Enforce the requested window, then stitch any gappy segments.
    st.trim(origin_time + delay, origin_time + delay + length)
    st.merge().sort(["starttime"])
    return st
def estimate_region(event: Event, min_length: float = 50.) -> dict:
    """
    Estimate the region to find templates within given a triggering event.

    Parameters
    ----------
    event
        The event that triggered this function
    min_length
        Minimum length in km for diameter of event circle around the
        triggering event

    Returns
    -------
    Dictionary keyed by "latitude", "longitude" and "maxradius", or None
    when the triggering event has no origin.

    Notes
    -----
    Uses a basic Wells and Coppersmith relation, scaled by 1.25 times.
    """
    from obspy.geodetics import kilometer2degrees
    try:
        origin = event.preferred_origin() or event.origins[0]
    except IndexError:
        Logger.error("Triggering event has no origin, not using.")
        return None
    try:
        magnitude = event.preferred_magnitude() or event.magnitudes[0]
    except IndexError:
        # Fixed message: was "using minimum length or {0}".
        Logger.warning("Triggering event has no magnitude, using minimum "
                       "length of {0}".format(min_length))
        magnitude = None
    if magnitude:
        # Wells and Coppersmith (1994) subsurface rupture length in km.
        length = 10 ** ((magnitude.mag - 5.08) / 1.16)
        # Scale up a bit - for Darfield this gave 0.6 deg, but the
        # aftershock region is more like 1.2 deg radius.
        length *= 1.25
    else:
        length = min_length
    # Clamp to the minimum diameter, then convert km diameter to a radius
    # in degrees.
    length = max(length, min_length)
    length = kilometer2degrees(length)
    length /= 2.
    return {
        "latitude": origin.latitude,
        "longitude": origin.longitude,
        "maxradius": length
    }
def event_time(event: Event) -> UTCDateTime:
    """
    Return a reference time for ``event``.

    The (preferred) origin time is used when available, otherwise the
    earliest pick time; an event with neither maps to the epoch.

    Parameters
    ----------
    event:
        Event to get a time for

    Returns
    -------
    Reference time for event.
    """
    try:
        timed_origin = event.preferred_origin() or event.origins[0]
    except IndexError:
        timed_origin = None
    if timed_origin is not None:
        return timed_origin.time
    if not event.picks:
        return UTCDateTime(0)
    return min(p.time for p in event.picks)
def _load_events(self):
    """
    Convert the raw event list into obspy (SC3ML) events and write one
    catalogue file per MPI rank.

    For each event: arrivals with missing network codes are repaired by
    looking up the closest inventory station (or culled when no plausible
    station is found), then an obspy Event is assembled from the preferred
    origin/magnitude, the surviving catalogue picks and our own re-picks.

    Note: this is Python 2 code (print statement below).
    """
    self._load_events_helper()
    cache = {}  # seedid -> corrected network code
    notFound = defaultdict(int)
    oEvents = []
    missingStations = defaultdict(int)
    for e in self.eventList:
        if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
            cullList = []
            for a in e.preferred_origin.arrival_list:
                # Arrivals that already have a network code need no fixing.
                if (len(a.net)):
                    continue
                seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                newCode = None
                if (seedid not in cache):
                    sc = a.sta
                    lonlat = self.isc_coords_dict[sc]
                    # Unknown station coordinates -> drop the arrival.
                    if (len(lonlat) == 0):
                        cullList.append(a)
                        continue
                    # end if
                    r = self.fdsn_inventory.getClosestStations(lonlat[0],
                                                               lonlat[1],
                                                               maxdist=1e3)
                    #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                    if (not r):
                        notFound[sc] += 1
                    else:
                        # Take the network code of the last candidate.
                        for cr in r[0]:
                            c = cr.split('.')[0]
                            newCode = c
                        # end for
                    # end if
                    if (newCode):
                        cache[seedid] = newCode
                    # end if
                else:
                    newCode = cache[seedid]
                # end if
                if (newCode):
                    #print a.net, newCode
                    a.net = newCode
                    sc = self.fdsn_inventory.t[a.net][a.sta]
                    # defaultdict here means the station is absent from the
                    # inventory lookup table.
                    if (type(sc) == defaultdict):
                        cullList.append(a)
                        continue
                    # end if
                    # Sanity check: recomputed epicentral distance must be
                    # within 0.5 deg of the catalogue distance.
                    da = gps2dist_azimuth(e.preferred_origin.lat,
                                          e.preferred_origin.lon,
                                          sc[1], sc[0])
                    dist = kilometers2degrees(da[0] / 1e3)
                    if (np.fabs(a.distance - dist) > 0.5):
                        cullList.append(a)
                    # end if
                # end if
            # end for
            for c in cullList:
                e.preferred_origin.arrival_list.remove(c)
        # end if

        # Create obspy event object
        ci = OCreationInfo(author='GA', creation_time=UTCDateTime(),
                           agency_id='GA-iteration-1')
        oid = self.get_id()
        origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                         time=UTCDateTime(e.preferred_origin.utctime),
                         longitude=e.preferred_origin.lon,
                         latitude=e.preferred_origin.lat,
                         depth=e.preferred_origin.depthkm * 1e3,
                         method_id=OResourceIdentifier(id='unknown'),
                         earth_model_id=OResourceIdentifier(id='iasp91'),
                         evaluation_mode='automatic',
                         creation_info=ci)
        magnitude = OMagnitude(
            resource_id=OResourceIdentifier(id=self.get_id()),
            mag=e.preferred_magnitude.magnitude_value,
            magnitude_type=e.preferred_magnitude.magnitude_type,
            origin_id=OResourceIdentifier(id=oid),
            creation_info=ci)
        event = OEvent(resource_id=OResourceIdentifier(id=self.get_id()),
                       creation_info=ci, event_type='earthquake')
        event.origins = [origin]
        event.magnitudes = [magnitude]
        event.preferred_magnitude_id = magnitude.resource_id
        event.preferred_origin_id = origin.resource_id

        # Insert old picks
        for a in e.preferred_origin.arrival_list:
            # Skip (and count) picks whose station is missing from the
            # inventory.
            if (type(self.fdsn_inventory.t[a.net][a.sta]) == defaultdict):
                missingStations[a.net + '.' + a.sta] += 1
                continue
            # end if
            oldPick = OPick(
                resource_id=OResourceIdentifier(id=self.get_id()),
                time=UTCDateTime(a.utctime),
                waveform_id=OWaveformStreamID(network_code=a.net,
                                              station_code=a.sta,
                                              channel_code=a.cha),
                methodID=OResourceIdentifier('unknown'),
                phase_hint=a.phase,
                evaluation_mode='automatic',
                creation_info=ci)
            oldArr = OArrival(resource_id=OResourceIdentifier(
                id=oldPick.resource_id.id + "#"),
                pick_id=oldPick.resource_id,
                phase=oldPick.phase_hint,
                distance=a.distance,
                earth_model_id=OResourceIdentifier(
                    'quakeml:ga.gov.au/earthmodel/iasp91'),
                creation_info=ci)
            event.picks.append(oldPick)
            event.preferred_origin().arrivals.append(oldArr)
        # end for

        # Insert our picks
        opList = self.our_picks.picks[e.public_id]
        if (len(opList)):
            for op in opList:
                if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                        defaultdict):
                    missingStations[op[1] + '.' + op[2]] += 1
                    continue
                # end if
                newPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(op[0]),
                    waveform_id=OWaveformStreamID(network_code=op[1],
                                                  station_code=op[2],
                                                  channel_code=op[3]),
                    methodID=OResourceIdentifier('phasepapy/aicd'),
                    backazimuth=op[-1],
                    phase_hint=op[4],
                    evaluation_mode='automatic',
                    comments=op[6],
                    creation_info=ci)
                newArr = OArrival(
                    resource_id=OResourceIdentifier(
                        id=newPick.resource_id.id + "#"),
                    pick_id=newPick.resource_id,
                    phase=newPick.phase_hint,
                    azimuth=op[-2],
                    distance=op[-3],
                    time_residual=op[5],
                    time_weight=1.,
                    earth_model_id=OResourceIdentifier(
                        'quakeml:ga.gov.au/earthmodel/iasp91'),
                    creation_info=ci)
                event.picks.append(newPick)
                event.preferred_origin().arrivals.append(newArr)
            # end for
        # end if
        quality = OOriginQuality(
            associated_phase_count=len(e.preferred_origin.arrival_list) +
            len(self.our_picks.picks[e.public_id]),
            used_phase_count=len(e.preferred_origin.arrival_list) +
            len(self.our_picks.picks[e.public_id]))
        event.preferred_origin().quality = quality
        oEvents.append(event)
    # end for // loop over e

    #print notFound
    print self.rank, missingStations
    cat = OCatalog(events=oEvents)
    ofn = self.output_path + '/%d.xml' % (self.rank)
    cat.write(ofn, format='SC3ML')
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information
    from HASH.

    Use the 'only_fm_picks' flag to only include the picks HASH used for the
    FocalMechanism. This flag will replace the 'picks' and 'arrivals' lists
    of existing events with new ones.

    Inputs
    -------
    hp : hashpy.HashPype instance
    event : obspy.core.event.Event
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists

    Returns
    -------
    obspy.core.event.Event

    Event will be new if no event was input, FocalMech added to existing
    event.

    NOTE(review): uses legacy obspy API spellings
    (``getReferredObject``, ``ResourceIdentifier.resource_id``) -
    confirm the installed obspy version still supports them.
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol  # number of polarity observations HASH used
    if event is None:
        # Build a fresh Event/Origin/Pick/Arrival tree from the HASH state.
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier('smi:hash/Origin/{0}'.format(
            hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier('smi:nsl/Pick/{0}'.format(
                p.creation_info.version))
            p.waveform_id = WaveformStreamID(network_code=hp.snet[_i],
                                             station_code=hp.sname[_i],
                                             channel_code=hp.scomp[_i])
            # HASH encodes polarity numerically; positive sign means 'up'.
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier('smi:nsl/Arrival/{0}'.format(
                p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            # HASH takeoff angles are measured from down; QuakeML wants
            # degrees from up, hence the 180-degree flip.
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = origin.resource_id.resource_id
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            # p_index maps HASH's internal ordering back to the origin's
            # arrival list.
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            p = a.pick_id.getReferredObject()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator to populate planes/axes etc.
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best"
    # as preferred.
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(creation_time=UTCDateTime(),
                                                author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s + 1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(azimuth=ax['T']['azimuth'],
                                                plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(azimuth=ax['P']['azimuth'],
                                                plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        # HASH quality grade stored as a comment on the mechanism.
        focal_mech.comments.append(
            Comment(
                hp.qual[s],
                resource_id=ResourceIdentifier(
                    focal_mech.resource_id.resource_id + '/comment/quality')))
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = \
                focal_mech.resource_id.resource_id
    return event
class DBToQuakemlConverter(AntelopeToEventConverter):
    """
    Antelope -> Event converter with customizations for writing QuakeML
    files

    Methods
    -------
    build(self, evid=None, orid=None, delete=False, phase_data=False,
          focal_data=False):
        Build up an Event using various parameters
    quakeml_str(): Return QuakeML string of the current Event object
    quakeml_anss_attrib(self, evid=None): Construct dict of ANSS attributes
    quakeml_filename(self, product): Try to construct a meaningful XML
        filename
    """
    # Resource-id factory used by the parent converter machinery.
    rid_factory = rid_function

    def quakeml_anss_attrib(self, evid=None):
        """
        Returns stuff necessary for quakeml files

        These things are specific to a datacenter, in an effort to
        generalize the actual writer function as much as possible.

        Input
        -----
        evid : int of some event identifier to name the file

        Returns : dict of the 4 ANSS 'catalog' attributes with meaningful
        values.
        """
        agency_code = self.agency.lower()
        if evid:
            # Zero-padded 8-digit event id per ANSS convention.
            anss_id = '{0:08d}'.format(evid)
        else:
            anss_id = '00000000'
        return {'datasource': agency_code,
                'dataid': agency_code + anss_id,
                'eventsource': agency_code,
                'eventid': anss_id}

    def quakeml_filename(self, product):
        # e.g. "<agency><evid>_<product>.xml"
        return self.event.extra['dataid']['value'] + '_' + product + '.xml'

    def extra_anss(self, **kwargs):
        """
        Create an dictionary for ANSS vars for use by event classes 'extra'
        attribute

        Inputs
        ------
        kwargs SHOULD be one of
        ('datasource','dataid','eventsource','eventid')

        Returns : dict of obspy 'extra' format
        """
        # in new "extra" patch, use both for now
        # NOTE: Obspy 0.9.3+ should support this natively, NO PATCH!!
        # - '_namespace' renamed to 'namespace'
        # - '_type renamed' to 'type'
        extra_attrib = {}
        ns_anss = 'http://anss.org/xmlns/catalog/0.1'
        # Register the ANSS namespace so the serializer emits the prefix.
        self.nsmap.update({'catalog': ns_anss})
        for a in kwargs:
            extra_attrib[a] = {'value': kwargs[a],
                               'namespace': ns_anss,
                               'type': 'attribute'}
        return extra_attrib

    def build(self, evid=None, orid=None, delete=False, phase_data=False,
              focal_data=False):
        """
        Build up an Event object

        Inputs
        ------
        evid : int of EVID
        orid : int of ORID
        delete : bool of whether to mark event deleted (False)
        phase_data : bool of whether to include phase arrivals for event
            (False)
        focal_data : bool of whether to look for focal mechanisms (False)
        """
        #--- Build an Event based on params ----------------------------------
        if evid is None and orid:
            try:
                evid = self._evid(orid)
            except:
                # Best-effort lookup; evid stays None when it fails.
                pass
        # 1. Build a stub Event to send a delete
        if delete:
            self.event = Event(event_type="not existing")
            self.event.creation_info = CreationInfo(
                version=evid, creation_time=UTCDateTime())
            self.event.resource_id = self._rid(self.event)
        else:
            self._build(orid=orid, phases=phase_data, focals=focal_data,
                        event_type="not reported")
            # if no EVID reported, try to get it from the db (version
            # attribute)
            if not evid:
                evid = int(self.event.creation_info.version)
        # Add a nearest event string, try to set event type with custom
        # etype additions
        prefor = self.event.preferred_origin()
        if prefor is not None:
            self.event.event_type = self.origin_event_type(prefor,
                                                           emap=self.emap)
            ed = self.get_nearest_event_description(prefor.latitude,
                                                    prefor.longitude)
            self.event.event_descriptions = [ed]
        # Generate NSL namespace attributes
        extra_attributes = self.quakeml_anss_attrib(evid)
        self.event.extra = self.extra_anss(**extra_attributes)

    @classmethod
    def _qmls(cls, c):
        """
        Writes Catalog object to QuakeML string

        Inputs
        ------
        c : obspy.core.event.Catalog

        Returns : str of QuakeML file contents
        """
        return Pickler(nsmap=cls.nsmap).dumps(c)

    def quakeml_str(self):
        """
        Return QuakeML string of current Event object

        :returns: str of QuakeML file contents
        """
        return self._qmls(self.catalog)
def get_inventory(
    client,
    tribe: Union[RealTimeTribe, Tribe],
    triggering_event: Event = None,
    location: dict = None,
    starttime: UTCDateTime = None,
    max_distance: float = 1000.,
    n_stations: int = 10,
    duration: float = 10,
    level: str = "channel",
    channel_list: Union[list, tuple] = ("EH?", "HH?"),
) -> Inventory:
    """
    Get a suitable inventory for a tribe - selects the most used, closest
    stations.

    Parameters
    ----------
    client:
        Obspy client with a get_stations service.
    tribe:
        Tribe or RealTimeTribe of templates to query for stations.
    triggering_event:
        Event with at least an origin to calculate distances from - if not
        specified will use `location`
    location:
        Dictionary with "latitude" and "longitude" keys - only used if
        `triggering event` is not specified.
    starttime:
        Start-time for station search - only used if `triggering_event`
        is not specified.
    max_distance:
        Maximum distance from `triggering_event.preferred_origin` or
        `location` to find stations. Units: km
    n_stations:
        Maximum number of stations to return
    duration:
        Duration stations must be active for. Units: days
    level:
        Level for inventory parsable by `client.get_stations`.
    channel_list
        List of channel-codes to be acquired. If `None` then all channels
        will be searched.

    Returns
    -------
    Inventory of the most used, closest stations.
    """
    inv = Inventory(networks=[], source=None)
    if triggering_event is not None:
        try:
            origin = (
                triggering_event.preferred_origin() or
                triggering_event.origins[0])
        except IndexError:
            Logger.error("Triggering event has no origin")
            return inv
        lat = origin.latitude
        lon = origin.longitude
        _starttime = origin.time
    else:
        lat = location["latitude"]
        lon = location["longitude"]
        _starttime = starttime
    # Query each channel pattern separately; missing data for one pattern
    # must not abort the others.
    for channel_str in channel_list or ["*"]:
        try:
            inv += client.get_stations(
                startbefore=_starttime,
                endafter=_starttime + (duration * 86400),
                channel=channel_str, latitude=lat,
                longitude=lon,
                maxradius=kilometer2degrees(max_distance),
                level=level)
        except FDSNNoDataException:
            continue
    if len(inv) == 0:
        return inv
    # Calculate distances
    # How often each station is picked across all templates in the tribe.
    station_count = Counter(
        [pick.waveform_id.station_code for template in tribe
         for pick in template.event.picks])
    sta_dist = []
    for net in inv:
        for sta in net:
            dist = locations2degrees(lat1=lat, long1=lon, lat2=sta.latitude,
                                     long2=sta.longitude)
            sta_dist.append((sta.code, dist, station_count[sta.code]))
    # Rank stations: most-picked first, then closest.
    sta_dist.sort(key=lambda _: (-_[2], _[1]))
    # NOTE(review): assumes at least one station exists once len(inv) > 0;
    # an inventory of empty networks would raise IndexError here - confirm.
    inv_out = inv.select(station=sta_dist[0][0])
    for sta in sta_dist[1:n_stations]:
        inv_out += inv.select(station=sta[0])
    return inv_out
def _load_events(self):
    """
    Convert the raw event list into obspy (SC3ML) events plus a flat text
    ensemble file, distributed over MPI ranks.

    Per event: arrivals lacking a network code are repaired from the
    closest inventory station (or culled), an obspy Event is assembled
    from the preferred origin/magnitude with catalogue picks (unless
    ``self.discard_old_picks``) and our own re-picks, and a formatted text
    line is recorded per pick. Rank 0 finally concatenates all per-rank
    text files into ``ensemble.txt``.
    """
    self._load_events_helper()
    cache = {}  # seedid -> corrected network code
    notFound = defaultdict(int)
    oEvents = []
    missingStations = defaultdict(int)
    lines = []  # alternating value/format-spec pairs for text output
    for e in tqdm(self.eventList, desc='Rank %d' % (self.rank)):
        if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
            cullList = []
            for a in e.preferred_origin.arrival_list:
                # Only arrivals with an empty network code need repair.
                if (len(a.net)):
                    continue
                seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                newCode = None
                if (seedid not in cache):
                    sc = a.sta
                    lonlat = self.isc_coords_dict[sc]
                    # Unknown station coordinates -> drop the arrival.
                    if (len(lonlat) == 0):
                        cullList.append(a)
                        continue
                    # end if
                    r = self.fdsn_inventory.getClosestStation(
                        lonlat[0], lonlat[1], maxdist=1e3)  # 1km
                    #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                    if (not r):
                        notFound[sc] += 1
                    else:
                        c = r[0].split('.')[0]
                        newCode = c
                    # end if
                    if (newCode):
                        cache[seedid] = newCode
                    # end if
                else:
                    newCode = cache[seedid]
                # end if
                if (newCode):
                    #print a.net, newCode
                    a.net = newCode
                    sc = self.fdsn_inventory.t[a.net][a.sta]
                    # defaultdict means station absent from the inventory.
                    if (type(sc) == defaultdict):
                        cullList.append(a)
                        continue
                    # end if
                    # Sanity check the catalogue distance against the
                    # recomputed epicentral distance (0.5 deg tolerance).
                    da = gps2dist_azimuth(e.preferred_origin.lat,
                                          e.preferred_origin.lon,
                                          sc[1], sc[0])
                    dist = kilometers2degrees(da[0] / 1e3)
                    if (np.fabs(a.distance - dist) > 0.5):
                        #print ([e.preferred_origin.lon,
                        #        e.preferred_origin.lat, sc[0], sc[1]])
                        cullList.append(a)
                    # end if
                # end if
            # end for
            for c in cullList:
                e.preferred_origin.arrival_list.remove(c)
        # end if

        # Create obspy event object
        ci = OCreationInfo(author='GA', creation_time=UTCDateTime(),
                           agency_id='GA-iteration-1')
        oid = self.get_id()
        origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                         time=UTCDateTime(e.preferred_origin.utctime),
                         longitude=e.preferred_origin.lon,
                         latitude=e.preferred_origin.lat,
                         depth=e.preferred_origin.depthkm * 1e3,
                         method_id=OResourceIdentifier(id='unknown'),
                         earth_model_id=OResourceIdentifier(id='iasp91'),
                         evaluation_mode='automatic',
                         creation_info=ci)
        magnitude = OMagnitude(
            resource_id=OResourceIdentifier(id=self.get_id()),
            mag=e.preferred_magnitude.magnitude_value,
            magnitude_type=e.preferred_magnitude.magnitude_type,
            origin_id=OResourceIdentifier(id=oid),
            creation_info=ci)
        event = OEvent(
            resource_id=OResourceIdentifier(id=str(e.public_id)),
            creation_info=ci, event_type='earthquake')
        event.origins = [origin]
        event.magnitudes = [magnitude]
        event.preferred_magnitude_id = magnitude.resource_id
        event.preferred_origin_id = origin.resource_id

        # Insert old picks
        if (not self.discard_old_picks):
            for a in e.preferred_origin.arrival_list:
                if (type(self.fdsn_inventory.t[a.net][a.sta]) ==
                        defaultdict):
                    missingStations[a.net + '.' + a.sta] += 1
                    continue
                # end if
                oldPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(a.utctime),
                    waveform_id=OWaveformStreamID(network_code=a.net,
                                                  station_code=a.sta,
                                                  channel_code=a.cha),
                    methodID=OResourceIdentifier('unknown'),
                    phase_hint=a.phase,
                    evaluation_mode='automatic',
                    creation_info=ci)
                oldArr = OArrival(
                    resource_id=OResourceIdentifier(
                        id=oldPick.resource_id.id + "#"),
                    pick_id=oldPick.resource_id,
                    phase=oldPick.phase_hint,
                    distance=a.distance,
                    earth_model_id=OResourceIdentifier(
                        'quakeml:ga.gov.au/earthmodel/iasp91'),
                    creation_info=ci)
                event.picks.append(oldPick)
                event.preferred_origin().arrivals.append(oldArr)

                # populate list for text output
                # (value, format) pairs; -999 marks unavailable fields.
                line = [
                    str(e.public_id), '{:<25s}',
                    e.preferred_origin.utctime.timestamp, '{:f}',
                    e.preferred_magnitude.magnitude_value, '{:f}',
                    e.preferred_origin.lon, '{:f}',
                    e.preferred_origin.lat, '{:f}',
                    e.preferred_origin.depthkm, '{:f}',
                    a.net, '{:<5s}',
                    a.sta, '{:<5s}',
                    a.cha, '{:<5s}',
                    a.utctime.timestamp, '{:f}',
                    a.phase, '{:<5s}',
                    self.fdsn_inventory.t[a.net][a.sta][0], '{:f}',
                    self.fdsn_inventory.t[a.net][a.sta][1], '{:f}',
                    -999, '{:f}',
                    -999, '{:f}',
                    a.distance, '{:f}',
                    -999, '{:f}',
                    -999, '{:f}',
                    -999, '{:f}',
                    -999, '{:f}',
                    -999, '{:f}',
                    -999, '{:d}',
                    -999, '{:d}'
                ]
                lines.append(line)
            # end for
        # end if

        # Insert our picks
        opList = self.our_picks.picks[e.public_id]
        if (len(opList)):
            for op in opList:
                if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                        defaultdict):
                    missingStations[op[1] + '.' + op[2]] += 1
                    continue
                # end if
                newPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(op[0]),
                    waveform_id=OWaveformStreamID(network_code=op[1],
                                                  station_code=op[2],
                                                  channel_code=op[3]),
                    methodID=OResourceIdentifier('phasepapy/aicd'),
                    backazimuth=op[-1],
                    phase_hint=op[4],
                    evaluation_mode='automatic',
                    comments=[
                        OComment(
                            text='phasepapy_snr = ' + str(op[6][0]) +
                            ', quality_measure_cwt = ' + str(op[6][1]) +
                            ', dom_freq = ' + str(op[6][2]) +
                            ', quality_measure_slope = ' + str(op[6][3]) +
                            ', band_index = ' + str(op[6][4]) +
                            ', nsigma = ' + str(op[6][5]),
                            force_resource_id=False)
                    ],
                    creation_info=ci)
                newArr = OArrival(
                    resource_id=OResourceIdentifier(
                        id=newPick.resource_id.id + "#"),
                    pick_id=newPick.resource_id,
                    phase=newPick.phase_hint,
                    azimuth=op[-2],
                    distance=op[-3],
                    time_residual=op[5],
                    time_weight=1.,
                    earth_model_id=OResourceIdentifier(
                        'quakeml:ga.gov.au/earthmodel/iasp91'),
                    creation_info=ci)
                event.picks.append(newPick)
                event.preferred_origin().arrivals.append(newArr)

                # populate list for text output
                line = [
                    str(e.public_id), '{:<25s}',
                    e.preferred_origin.utctime.timestamp, '{:f}',
                    e.preferred_magnitude.magnitude_value, '{:f}',
                    e.preferred_origin.lon, '{:f}',
                    e.preferred_origin.lat, '{:f}',
                    e.preferred_origin.depthkm, '{:f}',
                    op[1], '{:<5s}',
                    op[2], '{:<5s}',
                    op[3], '{:<5s}',
                    UTCDateTime(op[0]).timestamp, '{:f}',
                    op[4], '{:<5s}',
                    op[10], '{:f}',
                    op[9], '{:f}',
                    op[12], '{:f}',
                    op[13], '{:f}',
                    op[11], '{:f}',
                    op[5], '{:f}',
                    op[6][0], '{:f}',
                    op[6][1], '{:f}',
                    op[6][2], '{:f}',
                    op[6][3], '{:f}',
                    int(op[6][4]), '{:d}',
                    int(op[6][5]), '{:d}'
                ]
                lines.append(line)
            # end for
        # end if
        # NOTE(review): multiplying by int(self.discard_old_picks) counts
        # the old arrivals only when they are being DISCARDED - this looks
        # inverted (int(not self.discard_old_picks) expected); confirm
        # intent before relying on these counts.
        quality = OOriginQuality(
            associated_phase_count=len(e.preferred_origin.arrival_list) *
            int(self.discard_old_picks) +
            len(self.our_picks.picks[e.public_id]),
            used_phase_count=len(e.preferred_origin.arrival_list) *
            int(self.discard_old_picks) +
            len(self.our_picks.picks[e.public_id]))
        event.preferred_origin().quality = quality

        # Skip events that ended up with no picks at all.
        if (len(self.our_picks.picks[e.public_id]) == 0 and
                self.discard_old_picks):
            continue
        # end if

        oEvents.append(event)
    # end for // loop over e

    if (len(missingStations)):
        for k, v in missingStations.items():
            self.logger.warning('Missing station %s: %d picks' % (k, v))
        # end for
    # end if

    # write xml output
    if (len(oEvents)):
        cat = OCatalog(events=oEvents)
        ofn = self.output_path + '/%d.xml' % (self.rank)
        cat.write(ofn, format='SC3ML')
    # end if

    # write text output
    procfile = open('%s/proc.%d.txt' % (self.output_path, self.rank), 'w+')
    for line in lines:
        # Interleaved (value, format) pairs: join the specs, then format
        # with the values.
        lineout = ' '.join(line[1::2]).format(*line[::2])
        procfile.write(lineout + '\n')
    # end for
    procfile.close()

    # combine text output
    header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp phase stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
    # Wait for all ranks before rank 0 concatenates the per-rank files.
    self.comm.barrier()
    if (self.rank == 0):
        of = open('%s/ensemble.txt' % (self.output_path), 'w+')
        of.write(header)
        for i in range(self.nproc):
            fn = '%s/proc.%d.txt' % (self.output_path, i)
            lines = open(fn, 'r').readlines()
            for line in lines:
                of.write(line)
            # end for
            if (os.path.exists(fn)):
                os.remove(fn)
        # end for
        of.close()
class Converter(DBToQuakemlConverter):
    """
    Custom overrides on QuakemlConverter for NSL

    1) rid_factory : if RID is for an Event, use the web URL which resolves
       to an actual page.
    2) build : check for an 'mt' string, and run the special converter to
       get an Event/FocalMech/Mag/MomentTensor out of it...
    """
    rid_factory = CustomRIDFunction()
    # Authors whose solutions are considered automatic (not reviewed).
    automatic_authors = ['orbassoc', 'orbmag', 'HYPOI:rt']

    def build(self, evid=None, orid=None, delete=False, phase_data=False,
              focal_data=False, mt=None):
        """
        Build up an Event object

        Inputs
        ------
        evid : int of EVID
        orid : int of ORID
        delete : bool of whether to mark event deleted (False)
        phase_data : bool of whether to include phase arrivals for event
            (False)
        focal_data : bool of whether to look for focal mechanisms (False)
        mt : file/contents of NSL moment tensor (Ichinose)

        Returns : obspy.core.event.Event
        """
        #--- Build an Event based on params ----------------------------------
        if evid is None and orid:
            try:
                evid = self._evid(orid)
            except:
                # Best-effort lookup; evid stays None when it fails.
                pass
        # 1. Build a stub Event to send a delete
        if delete:
            self.event = Event(event_type="not existing")
            self.event.creation_info = CreationInfo(
                version=evid, creation_time=UTCDateTime())
            self.event.resource_id = self._rid(self.event)
        elif mt:
            # 2. Make a custom event (mt is a special-formatted text file)
            #_RIDFactory = type('RIDFactory', (CustomRIDFunction,),
            #                   {'authority': self.auth_id})
            self.event = mt2event(
                mt, rid_factory=CustomRIDFunction(self.auth_id))
        # 3. Use EventBuilder to get Event from the db
        else:
            self._build(orid=orid, phases=phase_data, focals=focal_data,
                        event_type="not reported")
            # if no EVID reported, try to get it from the db (version
            # attribute)
            if not evid:
                evid = int(self.event.creation_info.version)
        # Add a nearest event string, try to set event type with custom
        # etype additions
        prefor = self.event.preferred_origin()
        if prefor is not None:
            event_type = self.origin_event_type(prefor, emap=self.emap)
            if event_type is None:
                event_type = "earthquake"
            self.event.event_type = event_type
            ed = self.get_nearest_event_description(prefor.latitude,
                                                    prefor.longitude)
            self.event.event_descriptions = [ed]

        # get rid of preferred if sending focalmech, so it doesn't clobber a
        # better origin (This is a hack to deal with USGS splitting QuakeML
        # into different products, In theory, one should be able to have a
        # QuakeML file with everything, but alas)
        if focal_data:
            self.event.preferred_origin_id = None
            self.event.preferred_magnitude_id = None

        # Generate NSL namespace attributes
        extra_attributes = self.quakeml_anss_attrib(evid)
        self.event.extra = self.extra_anss(**extra_attributes)
def estimate_region(
    event: Event,
    min_length: float = 50.,
    scaling_relation: Union[str, Callable] = 'default',
    multiplier: float = 1.25,
) -> dict:
    """
    Estimate the region to find templates within given a triggering event.

    Parameters
    ----------
    event
        The event that triggered this function
    min_length
        Minimum length in km for diameter of event circle around the
        triggering event
    scaling_relation
        Name of registered scaling-relationship or Callable that takes only
        the earthquake magnitude as an argument and returns length in km
    multiplier
        Fudge factor to scale the scaling relation up by a constant.

    Returns
    -------
    Dictionary keyed by "latitude", "longitude" and "maxradius", or None
    when the triggering event has no origin.

    Notes
    -----
    The `scaling_relation` * `multiplier` defines the `maxradius` of the
    region
    """
    from obspy.geodetics import kilometer2degrees
    try:
        origin = event.preferred_origin() or event.origins[0]
    except IndexError:
        Logger.error("Triggering event has no origin, not using.")
        return None
    try:
        magnitude = event.preferred_magnitude() or event.magnitudes[0]
    except IndexError:
        # Fixed message: was "using minimum length or {0}".
        Logger.warning("Triggering event has no magnitude, using minimum "
                       "length of {0}".format(min_length))
        magnitude = None
    if magnitude:
        # Resolve a named relation to its callable once, then apply it.
        if not callable(scaling_relation):
            scaling_relation = get_scaling_relation(scaling_relation)
        length = scaling_relation(magnitude.mag)
        length *= multiplier
    else:
        length = min_length
    # Clamp to the minimum diameter, then convert km diameter to a radius
    # in degrees.
    length = max(length, min_length)
    length = kilometer2degrees(length)
    length /= 2.
    return {
        "latitude": origin.latitude,
        "longitude": origin.longitude,
        "maxradius": length
    }
def plot_event(event: Event, st: Stream, length: float = 60.,
               passband: tuple = (2, 10), size: tuple = (10.5, 10.5),
               show: bool = True, fig: Figure = None) -> Figure:
    """
    Plot the waveforms for an event with pick and calculated arrival times.

    Parameters
    ----------
    event
        Event to plot
    st
        Obspy Stream for this event
    length
        Length to plot, from origin time
    passband
        Tuple of (lowcut, highcut) for filtering.
    size
        Figure size parsed to matplotlib.
    show
        Whether to show the figure or not.
    fig
        Figure to plot into.

    Returns
    -------
        Figure.
    """
    import matplotlib.pyplot as plt

    # NOTE: sorts the event's picks in-place (side effect on the caller's
    # event) so that picks plot in chronological order.
    event.picks.sort(key=lambda p: p.time)
    origin_time = _get_plot_starttime(event, st)
    # Trim the event around the origin time and band-pass filter.
    _st = st.slice(origin_time, origin_time + length).copy()
    _st = _st.split().detrend().filter(
        "bandpass", freqmin=passband[0], freqmax=passband[1]).merge()
    if fig is None:
        fig, axes = plt.subplots(len(_st), 1, sharex=True, figsize=size)
        if len(_st) == 1:
            axes = [axes]
    else:
        # Re-use the supplied figure: build one shared-x axis per trace.
        axes = [fig.add_subplot(len(_st), 1, 1)]
        if len(_st) > 1:
            for i in range(len(_st) - 1):
                axes.append(
                    fig.add_subplot(len(_st), 1, i + 2, sharex=axes[0]))
    lines, labels = ([], [])
    min_x = []
    max_x = []
    for ax, tr in zip(axes, _st):
        picks, arrivals = ([], [])
        for pick in event.picks:
            if pick.waveform_id.station_code == tr.stats.station:
                picks.append(pick)
        try:
            origin = event.preferred_origin() or event.origins[0]
            for arrival in origin.arrivals:
                referenced_pick = arrival.pick_id.get_referred_object()
                # BUG FIX: get_referred_object() returns None when the pick
                # cannot be resolved; the unguarded attribute access raised
                # AttributeError (not caught by the IndexError handler).
                if referenced_pick and (
                        referenced_pick.waveform_id.station_code ==
                        tr.stats.station):
                    arrivals.append(arrival)
        except IndexError:  # pragma: no cover
            # No origin available - plot picks without arrivals.
            pass
        lines, labels, chan_min_x, chan_max_x = _plot_channel(
            ax=ax, tr=tr, picks=picks, arrivals=arrivals, lines=lines,
            labels=labels)
        min_x.append(chan_min_x)
        max_x.append(chan_max_x)
    # Make xlabels invisible for all axes apart from the last one
    for ax in axes[0:-1]:
        plt.setp(ax.get_xticklabels(), visible=False)
    axes[-1].set_xlim([np.min(min_x), np.max(max_x)])
    axes[-1].set_xlabel("Time")
    plt.tight_layout()
    plt.subplots_adjust(hspace=0)
    fig.legend(lines, labels)
    if show:  # pragma: no cover
        fig.show()
    del _st
    return fig
def outputOBSPY(hp, event=None, only_fm_picks=False):
    """
    Make an Event which includes the current focal mechanism information from HASH

    Use the 'only_fm_picks' flag to only include the picks HASH used for the
    FocalMechanism. This flag will replace the 'picks' and 'arrivals' lists
    of existing events with new ones.

    Inputs
    -------
    hp : hashpy.HashPype instance
    event : obspy.core.event.Event
    only_fm_picks : bool of whether to overwrite the picks/arrivals lists

    Returns
    -------
    obspy.core.event.Event

    Event will be new if no event was input, FocalMech added to existing event
    """
    # Returns new (or updates existing) Event with HASH solution
    n = hp.npol
    if event is None:
        # Build a fresh Event with an Origin and one Pick/Arrival pair per
        # polarity observation used by HASH.
        event = Event(focal_mechanisms=[], picks=[], origins=[])
        origin = Origin(arrivals=[])
        origin.time = UTCDateTime(hp.tstamp)
        origin.latitude = hp.qlat
        origin.longitude = hp.qlon
        origin.depth = hp.qdep
        origin.creation_info = CreationInfo(version=hp.icusp)
        origin.resource_id = ResourceIdentifier(
            'smi:hash/Origin/{0}'.format(hp.icusp))
        for _i in range(n):
            p = Pick()
            p.creation_info = CreationInfo(version=hp.arid[_i])
            p.resource_id = ResourceIdentifier(
                'smi:hash/Pick/{0}'.format(p.creation_info.version))
            p.waveform_id = WaveformStreamID(
                network_code=hp.snet[_i], station_code=hp.sname[_i],
                channel_code=hp.scomp[_i])
            if hp.p_pol[_i] > 0:
                p.polarity = 'positive'
            else:
                p.polarity = 'negative'
            a = Arrival()
            a.creation_info = CreationInfo(version=hp.arid[_i])
            a.resource_id = ResourceIdentifier(
                'smi:hash/Arrival/{0}'.format(p.creation_info.version))
            a.azimuth = hp.p_azi_mc[_i, 0]
            # 180 - angle: presumably converts HASH's takeoff-angle
            # convention to QuakeML's - TODO(review) confirm against the
            # else-branch below, which stores the raw angle.
            a.takeoff_angle = 180. - hp.p_the_mc[_i, 0]
            a.pick_id = p.resource_id
            origin.arrivals.append(a)
            event.picks.append(p)
        event.origins.append(origin)
        event.preferred_origin_id = str(origin.resource_id)
    else:  # just update the changes
        origin = event.preferred_origin()
        picks = []
        arrivals = []
        for _i in range(n):
            ind = hp.p_index[_i]
            a = origin.arrivals[ind]
            # BUG FIX: getReferredObject() is the deprecated (and removed)
            # camelCase obspy API; use get_referred_object() as the rest of
            # this codebase does.
            p = a.pick_id.get_referred_object()
            a.takeoff_angle = hp.p_the_mc[_i, 0]
            picks.append(p)
            arrivals.append(a)
        if only_fm_picks:
            origin.arrivals = arrivals
            event.picks = picks
    # Use the double couple calculator and populate planes/axes etc
    x = hp._best_quality_index
    # Put all the mechanisms into the 'focal_mechanisms' list, mark "best"
    # as preferred
    for s in range(hp.nmult):
        dc = DoubleCouple([hp.str_avg[s], hp.dip_avg[s], hp.rak_avg[s]])
        ax = dc.axis
        focal_mech = FocalMechanism()
        focal_mech.creation_info = CreationInfo(
            creation_time=UTCDateTime(), author=hp.author)
        focal_mech.triggering_origin_id = origin.resource_id
        focal_mech.resource_id = ResourceIdentifier(
            'smi:hash/FocalMechanism/{0}/{1}'.format(hp.icusp, s+1))
        focal_mech.method_id = ResourceIdentifier('HASH')
        focal_mech.nodal_planes = NodalPlanes()
        focal_mech.nodal_planes.nodal_plane_1 = NodalPlane(*dc.plane1)
        focal_mech.nodal_planes.nodal_plane_2 = NodalPlane(*dc.plane2)
        focal_mech.principal_axes = PrincipalAxes()
        focal_mech.principal_axes.t_axis = Axis(
            azimuth=ax['T']['azimuth'], plunge=ax['T']['dip'])
        focal_mech.principal_axes.p_axis = Axis(
            azimuth=ax['P']['azimuth'], plunge=ax['P']['dip'])
        focal_mech.station_polarity_count = n
        focal_mech.azimuthal_gap = hp.magap
        focal_mech.misfit = hp.mfrac[s]
        focal_mech.station_distribution_ratio = hp.stdr[s]
        focal_mech.comments.append(
            Comment(hp.qual[s],
                    resource_id=ResourceIdentifier(
                        str(focal_mech.resource_id) + '/comment/quality'))
            )
        #----------------------------------------
        event.focal_mechanisms.append(focal_mech)
        if s == x:
            event.preferred_focal_mechanism_id = str(focal_mech.resource_id)
    return event
def synthesise_real_time(
    triggering_event: Event,
    database_duration: float,
    config: Config,
    detection_runtime: float = 3600.0,
    make_templates: bool = True,
    speed_up: float = 1,
    debug: bool = False,
    query_interval: float = 60,
):
    """
    Synthesise a real-time matched-filter process for old data.

    Parameters
    ----------
    triggering_event:
        The Event that should trigger the system (must have happened in the
        past)
    database_duration:
        The duration to create the template database for in days prior to the
        triggering event
    config:
        Configuration for this synthesis
    detection_runtime:
        Maximum run-time for the detector in seconds
    make_templates:
        Whether templates need to be made or not.
    speed_up:
        Speed-up factor for detector - stream data faster than "real-time".
    debug:
        Whether to run logging in debug or not
    query_interval:
        How often to query the waveform server in seconds. Smaller numbers
        will query more often, but this is limited by disk read speeds -
        make sure you don't go too small and make your system stall!
    """
    if debug:
        config.log_level = "DEBUG"
        print("Using the following configuration:\n{0}".format(config))
    config.setup_logging()
    client = config.rt_match_filter.get_client()
    # If origins is empty this raises IndexError; estimate_region below
    # would have returned None for the same event, so region is non-None
    # whenever we get past this line.
    trigger_origin = (triggering_event.preferred_origin() or
                      triggering_event.origins[0])
    region = estimate_region(triggering_event)
    # database_duration is in days; origin times are in seconds.
    database_starttime = trigger_origin.time - (database_duration * 86400)
    database_endtime = trigger_origin.time
    # Pre-bind the reactor's trigger thresholds from the config.
    trigger_func = partial(
        magnitude_rate_trigger_func,
        magnitude_threshold=config.reactor.magnitude_threshold,
        rate_threshold=config.reactor.rate_threshold,
        rate_bin=config.reactor.rate_radius,
        minimum_events_in_bin=config.reactor.minimum_events_in_bin)
    template_bank = TemplateBank(
        config.database_manager.event_path,
        name_structure=config.database_manager.name_structure,
        event_format=config.database_manager.event_format,
        path_structure=config.database_manager.path_structure,
        event_ext=config.database_manager.event_ext,
        executor=None)
    if make_templates:
        # Download events in the region/time-window and build templates.
        Logger.info("Downloading template events")
        catalog = client.get_events(starttime=database_starttime,
                                    endtime=database_endtime, **region)
        Logger.info(f"Downloaded {len(catalog)} events")
        Logger.info("Building template database")
        template_bank.make_templates(catalog=catalog, client=client,
                                     **config.template)
    else:
        # Templates already on disk - just refresh the bank's index.
        template_bank.update_index()
    tribe = template_bank.get_templates(starttime=database_starttime,
                                        endtime=database_endtime, **region)
    inventory = get_inventory(
        client, tribe, triggering_event=triggering_event,
        max_distance=config.rt_match_filter.max_distance,
        n_stations=config.rt_match_filter.n_stations)
    config.plot.update({"offline": True})  # Use to use data time-stamps
    # Pre-download the waveforms the simulation will replay into a local
    # wavebank; failures for individual channels are logged and skipped
    # (best-effort - a missing channel should not abort the simulation).
    Logger.info("Downloading data")
    wavebank = WaveBank("simulation_wavebank")
    for network in inventory:
        for station in network:
            for channel in station:
                try:
                    st = client.get_waveforms(
                        network=network.code, station=station.code,
                        channel=channel.code,
                        location=channel.location_code,
                        starttime=trigger_origin.time - 60.,
                        endtime=trigger_origin.time + detection_runtime)
                except Exception as e:
                    Logger.error("Could not download data for "
                                 f"{network.code}.{station.code}."
                                 f"{channel.location_code}.{channel.code}")
                    Logger.error(e)
                    continue
                wavebank.put_waveforms(st)
    # Set up config to use the wavebank rather than FDSN.
    config.streaming.update({
        "rt_client_url": str(wavebank.bank_path),
        "rt_client_type": "obsplus",
        "starttime": trigger_origin.time - 60,
        "speed_up": speed_up,
        "query_interval": 1.0
    })
    listener = CatalogListener(client=client, catalog_lookup_kwargs=region,
                               template_bank=template_bank,
                               interval=query_interval, keep=86400,
                               catalog=None, waveform_client=client)
    # NOTE(review): _speed_up and _test_start_step are private attributes of
    # CatalogListener - presumably test/simulation hooks that shift the
    # listener's clock back to the trigger time; confirm against
    # CatalogListener's implementation.
    listener._speed_up = speed_up
    listener._test_start_step = UTCDateTime.now() - trigger_origin.time
    listener._test_start_step += 60  # Start up 1 minute before the event
    reactor = Reactor(client=client, listener=listener,
                      trigger_func=trigger_func,
                      template_database=template_bank, config=config)
    Logger.info("Starting reactor")
    # Blocks until max_run_length is reached.
    reactor.run(max_run_length=config.reactor.max_run_length)