def get_geoinf(x1, y1, x2, y2, inp='coord'):
    if inp == 'coord':
        try:
            # Old ObsPy API (obspy.core.util.gps2DistAzimuth).
            dist, az, baz = gps2DistAzimuth(x1, y1, x2, y2)
        except NameError:
            # Renamed in newer ObsPy versions to
            # obspy.geodetics.gps2dist_azimuth.
            dist, az, baz = gps2dist_azimuth(x1, y1, x2, y2)
    return (x1, y1, x2, y2, dist, az, baz)
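# A minimal usage sketch for get_geoinf above, assuming ObsPy is installed;
# the alias keeps the old name working on newer ObsPy versions, so the
# try/except inside get_geoinf succeeds either way. Coordinates are
# illustrative values, not from the original code.
try:
    from obspy.core.util import gps2DistAzimuth
except ImportError:
    from obspy.geodetics import gps2dist_azimuth as gps2DistAzimuth

# Berlin -> Paris, roughly 878 km.
info = get_geoinf(52.52, 13.40, 48.86, 2.35)
print('distance: %.1f km, azimuth: %.1f deg, backazimuth: %.1f deg'
      % (info[4] / 1000.0, info[5], info[6]))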
def get_nearest_event_description(self, latitude, longitude, database=None):
    """
    Get the nearest place to a lat/lon from a db with a 'places' table.

    Inputs
    ------
    database : str of database with 'places' table
    latitude : float of latitude
    longitude : float of longitude

    Returns : string of the distance and compass azimuth to a place
    """
    if database is None:
        database = self.place_db
    try:
        curs = connect(database).cursor(row_factory=OrderedDictRow)
        nrecs = curs.execute.lookup(table='places')
        stats = array([gps2DistAzimuth(latitude, longitude,
                                       r['lat'], r['lon']) for r in curs])
        ind = stats.argmin(0)[0]
        minstats = stats[ind]
        curs.scroll(int(ind), 'absolute')
        minrec = curs.fetchone()
        dist, azi, backazi = minstats
        compass = azimuth2compass(backazi)
        place_info = {'distance': dist / 1000.,
                      'direction': compass,
                      'city': minrec['place'],
                      'state': minrec['state']}
        curs.close()
        s = "{distance:0.1f} km {direction} of {city}, {state}".format(
            **place_info)
        return self._nearest_cities_description(s)
    except Exception:
        return None
def blast_check(kblist, slist, bcepimax, bcdayst, bcdayen, bcminct,
                fplog05, bcYYst, bcYYen):
    pblist = []
    print("")
    print("Blast Detector:")
    lS = len(slist)
    lB = len(kblist)
    print("Number of known blast-events: %8d" % lB)
    print("")
    print("Potential blast-events in extracted event list:")
    fplog05.write("Number of known blast-events: %8d\n" % (lB))
    fplog05.write("\n")
    fplog05.write("Potential blast-events in extracted event list:\n")
    # Loop over potential (known) blast events:
    for i in range(lS):
        otim = UTCDateTime(slist[i].timestamp)
        # Still missing: convert to local time...
        cntb = 0
        if (slist[i].etype != "E") and (slist[i].hh >= bcdayst) and \
           (slist[i].hh <= bcdayen) and (otim.weekday < 6) and \
           (otim.year >= bcYYst) and (otim.year < bcYYen):
            # Calculate distance to each known blast and count the number
            # of blast events within the radius:
            for j in range(lB):
                epid = gps2DistAzimuth(slist[i].lat, slist[i].lon,
                                       kblist[j].lat, kblist[j].lon)
                if epid[0] / 1000.0 <= bcepimax:
                    cntb += 1
        if cntb >= bcminct:
            pblist.append(slist[i])
            print("#%03d | %s" % (cntb, slist[i]))
            fplog05.write("#%03d | %s\n" % (cntb, slist[i]))
    return pblist
def get_interstation_distance(station1, station2, coordinates="DEG"):
    """Returns the distance in km between `station1` and `station2`.

    .. warning:: Currently both stations have to use the same
        coordinate system!

    :type station1: :class:`~msnoise.msnoise_table_def.Station`
    :param station1: A Station object
    :type station2: :class:`~msnoise.msnoise_table_def.Station`
    :param station2: A Station object
    :type coordinates: str
    :param coordinates: The coordinates system. "DEG" is WGS84 latitude/
        longitude in degrees. "UTM" is expressed in meters.

    :rtype: float
    :returns: The interstation distance in km
    """
    if coordinates == "DEG":
        dist, azim, bazim = gps2DistAzimuth(station1.Y, station1.X,
                                            station2.Y, station2.X)
        return dist / 1.e3
    else:
        dist = np.hypot(float(station1.X - station2.X),
                        float(station1.Y - station2.Y)) / 1.e3
        return dist
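# A minimal sketch of calling get_interstation_distance, assuming the
# imports below and stand-in station objects; `FakeStation` is a
# hypothetical helper for illustration, not part of msnoise.
from collections import namedtuple

import numpy as np
try:
    from obspy.core.util import gps2DistAzimuth
except ImportError:
    from obspy.geodetics import gps2dist_azimuth as gps2DistAzimuth

FakeStation = namedtuple('FakeStation', ['X', 'Y'])  # X=lon, Y=lat for "DEG"

sta1 = FakeStation(X=13.40, Y=52.52)
sta2 = FakeStation(X=2.35, Y=48.86)
print('%.1f km' % get_interstation_distance(sta1, sta2, coordinates='DEG'))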
def azimuth(coordinates, x0, y0, x1, y1):
    """
    Returns the azimuth between two coordinate sets.

    :type coordinates: str
    :param coordinates: {'DEG', 'UTM', 'MIX'}
    :type x0: float
    :param x0: X coordinate of station 1
    :type y0: float
    :param y0: Y coordinate of station 1
    :type x1: float
    :param x1: X coordinate of station 2
    :type y1: float
    :param y1: Y coordinate of station 2

    :rtype: float
    :returns: The azimuth in degrees
    """
    if coordinates == "DEG":
        dist, azim, bazim = gps2DistAzimuth(y0, x0, y1, x1)
        return azim
    elif coordinates == 'UTM':
        azim = 90. - np.arctan2((y1 - y0), (x1 - x0)) * 180. / np.pi
        return azim
    else:
        print("Please consider having a single coordinate system for "
              "all stations")
        return 0
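# A small check of the two branches above, assuming the import alias below.
# For "UTM", 90 - arctan2(dy, dx) converts the mathematical angle
# (counterclockwise from east) into a compass azimuth (clockwise from
# north), so a point due east gives 90 degrees and due north gives 0.
import numpy as np
try:
    from obspy.core.util import gps2DistAzimuth
except ImportError:
    from obspy.geodetics import gps2dist_azimuth as gps2DistAzimuth

print(azimuth('UTM', 0., 0., 1000., 0.))  # due east -> 90.0
print(azimuth('UTM', 0., 0., 0., 1000.))  # due north -> 0.0
print(azimuth('DEG', 13.40, 52.52, 2.35, 48.86))  # Berlin -> Paris (WSW)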
def on_release(event):
    lon_release, lat_release = m(event.xdata, event.ydata, 'inverse')
    dist_km = gps2DistAzimuth(lat_press, lon_press,
                              lat_release, lon_release)[0] / 1000.
    dist_degree = gps2DistDegree(lat_press, lon_press,
                                 lat_release, lon_release)
    if dist_km > 0.1:
        print('position release lat: %.2f lon: %.2f'
              % (lat_release, lon_release))
        print('Distance between points: %.2f degree or %.2f km'
              % (dist_degree, dist_km))
def _mig(lats, lons, perc, ret, stations, st1, st2, i0, tr, velocity, sr,
         normalize):
    for x, lon in enumerate(lons):
        if x % 10 == 0:
            sys.stdout.write('Progress[%.2f%%]\r'
                             % (100. * (x + 1) / len(lons) * perc))
            sys.stdout.flush()
        for y, lat in enumerate(lats):
            # Distances (km) from both stations to the candidate
            # scatterer position (lat, lon).
            dist1 = gps2DistAzimuth(stations[st1].latitude,
                                    stations[st1].longitude,
                                    lat, lon)[0] / 1000.
            if st1 == st2:
                dist2 = dist1
            else:
                dist2 = gps2DistAzimuth(stations[st2].latitude,
                                        stations[st2].longitude,
                                        lat, lon)[0] / 1000.
            # Two-way travel time and the corresponding sample index.
            time = (dist1 + dist2) / velocity
            i = int(round(time * sr))
            if i0 < i < tr.stats.npts:
                to_add = tr.data[i]
                if normalize:
                    to_add /= np.max(np.abs(tr.data[i0:]))
                ret[x, y] += to_add
def dist(self, st1, st2, indeg=False):
    dist_deg = gps2DistDegree(self[st1].latitude, self[st1].longitude,
                              self[st2].latitude, self[st2].longitude)
    dist_km = gps2DistAzimuth(self[st1].latitude, self[st1].longitude,
                              self[st2].latitude,
                              self[st2].longitude)[0] / 1.e3
    if indeg is True:
        return dist_deg
    elif indeg is False:
        return dist_km
    else:
        return dist_km, dist_deg
def migrate_sep(stream, stations, lats, lons, velocity, skip=0,
                normalize=True, station_splitter='-'):
    ret = np.zeros((len(lons), len(lats)))
    sr = stream[0].stats.sampling_rate
    for l, tr in enumerate(stream):
        st1, st2 = tr.stats.station.split(station_splitter)
        st1, st2 = st1[:-1], st2[:-1]
        dist_st = gps2DistAzimuth(stations[st1].latitude,
                                  stations[st1].longitude,
                                  stations[st2].latitude,
                                  stations[st2].longitude)[0] / 1000.
        i0 = int(round((dist_st / velocity + skip) * sr))
        run_in_separate_process(_mig, lats, lons, (l + 1.) / len(stream),
                                ret, stations, st1, st2, i0, tr, velocity,
                                sr, normalize)
    return ret
def single_phase(self):
    events = self.assoc_db.query(Associated).all()
    for event in events:
        event_id = event.id
        ot = event.ot

        # Pick phases that are between origin time and origin time + max_tt
        sta_assoc = []
        # Stations that already contribute associated P and S pairs:
        for sta, in self.assoc_db.query(PickModified.sta).\
                filter(PickModified.assoc_id == event_id).\
                distinct().all():
            sta_assoc.append(sta)

        # Associate single phases
        for sta, in self.assoc_db.query(PickModified.sta).\
                filter(PickModified.assoc_id == None).\
                filter(PickModified.time > ot).\
                filter(PickModified.time <=
                       (ot + timedelta(seconds=self.max_tt))).\
                distinct().all():
            station = self.tt_stations_db_1D.query(Station1D).\
                filter(Station1D.sta == sta).first()
            d_km = gps2DistAzimuth(event.latitude, event.longitude,
                                   station.latitude,
                                   station.longitude)[0] / 1000.

            # Only associate single phases from stations that did not
            # contribute P and S pairs.
            if (d_km < self.max_km) and (sta not in sta_assoc):
                tt, d_diff = tt_km(self.tt_stations_db_1D, d_km)

                picks_p = self.assoc_db.query(PickModified).\
                    filter(PickModified.sta == sta).\
                    filter(PickModified.time >=
                           (ot + timedelta(seconds=tt.p_tt -
                                           0.5 * self.aggr_window))).\
                    filter(PickModified.time <=
                           (ot + timedelta(seconds=tt.p_tt +
                                           0.5 * self.aggr_window))).all()
                # If there is more than one modified pick in the
                # aggr_window range, associate only the first one.
                if picks_p:
                    modi_pick = picks_p[0]  # the first modified pick
                    modi_pick.phase = 'P'
                    modi_pick.assoc_id = event.id
                    modi_pick.locate_flag = False
                    # Mark all picks contributing to this modified pick
                    # with the assoc_id and phase.
                    picks = self.assoc_db.query(Pick).\
                        filter(Pick.modified_id == modi_pick.id).all()
                    for pick in picks:
                        pick.phase = 'P'
                        pick.assoc_id = event.id
                        pick.locate_flag = False

                picks_s = self.assoc_db.query(PickModified).\
                    filter(PickModified.sta == sta).\
                    filter(PickModified.time >=
                           (ot + timedelta(seconds=tt.s_tt -
                                           0.5 * self.aggr_window))).\
                    filter(PickModified.time <=
                           (ot + timedelta(seconds=tt.s_tt +
                                           0.5 * self.aggr_window))).all()
                # Same rule for S: take only the first modified pick.
                if picks_s:
                    modi_pick = picks_s[0]  # the first modified pick
                    modi_pick.phase = 'S'
                    modi_pick.assoc_id = event.id
                    modi_pick.locate_flag = None
                    picks = self.assoc_db.query(Pick).\
                        filter(Pick.modified_id == modi_pick.id).all()
                    for pick in picks:
                        pick.phase = 'S'
                        pick.assoc_id = event.id
                        pick.locate_flag = None
    self.assoc_db.commit()
def azimuth(coordinates, x0, y0, x1, y1):
    if coordinates == "DEG":
        dist, azim, bazim = gps2DistAzimuth(y0, x0, y1, x1)
        # print dist, azim, bazim
        return azim
    elif coordinates == 'UTM':
        azim = 90. - np.arctan2((y1 - y0), (x1 - x0)) * 180. / np.pi
        # print azim
        return azim
    else:
        print("Please consider having a single coordinate system "
              "for all stations")
        return 0
def coords2azbazinc(stream, origin):
    """
    Returns azimuth, backazimuth and incidence angle from station
    coordinates given in the first trace of the stream and from the event
    location specified in the origin dictionary.
    """
    sta_coords = stream[0].stats.coordinates
    dist, bazim, azim = gps2DistAzimuth(sta_coords.latitude,
                                        sta_coords.longitude,
                                        origin['Latitude'],
                                        origin['Longitude'])
    elev_diff = sta_coords.elevation - origin['Depth'] * 1000
    inci = math.atan2(dist, elev_diff) * 180.0 / math.pi
    return azim, bazim, inci
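# A toy check of the incidence-angle formula above: atan2(dist, elev_diff)
# is 0 for a source directly below the station (vertical incidence) and
# grows with epicentral distance; the values are illustrative only.
import math

dist = 10000.0                 # epicentral distance in m
elev_diff = 0.0 - (-10000.0)   # station elevation minus source depth, in m
inci = math.atan2(dist, elev_diff) * 180.0 / math.pi
print('incidence: %.1f deg' % inci)  # 45.0 for equal distance and depth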
def residuals_minimum(location, args):
    # from obspy.core.util import gps2DistAzimuth
    # RMS misfit (in degrees) between the observed epicentral distances
    # (args[i][4]) and the distances from the trial location; the factor
    # /1000 * 180 / (pi * 6371) converts meters to degrees.
    L = len(args)
    residuals = 0
    for i in range(L):
        residuals += (gps2DistAzimuth(location[1], location[0],
                                      args[i][2], args[i][1])[0]
                      / 1000 * 180 / (np.pi * 6371) - args[i][4]) ** 2
    return np.sqrt(residuals / L)
def pick(self, latitude=None, longitude=None, minval=0, maxval=180,
         indegree=True, after='1900-01-01', before='3000-01-01',
         bigger=0., smaller=10., replace=True):
    """
    Pick events fulfilling the given conditions.

    :param latitude, longitude: coordinates for distance condition
    :param minval, maxval: distance of event has to be between these values
    :param indegree: True if minval and maxval in deg, False if in km
    :param after, before: UTCDateTime objects or strings with time range
    :param bigger, smaller: magnitude range
    :param replace: if True the data in the event list is overwritten
    :return: picked Events instance
    """
    if indegree:
        degorkm = 'deg'
    else:
        degorkm = 'km'
    newdata = []
    dist = 50  # default passes the distance condition if no coordinates
    for event in self[::-1]:
        if latitude is not None and longitude is not None:
            if not indegree:
                dist = gps2DistAzimuth(event.latitude, event.longitude,
                                       latitude, longitude)[0] / 1000.
            else:
                dist = gps2DistDegree(event.latitude, event.longitude,
                                      latitude, longitude)
        if bigger <= event.magnitude and smaller >= event.magnitude and \
                dist >= minval and dist <= maxval and \
                UTC(after) <= event.datetime and \
                UTC(before) >= event.datetime:
            newdata.append(event)
        elif replace:
            self.remove(event)
    if latitude is None:
        latitude = 0
    if longitude is None:
        longitude = 0
    log.info('Pick %d events with distance between %d%s and %d%s from '
             'coordinates lat:%5.2f lon:%5.2f; between the dates %s and '
             '%s and between the magnitudes %3.1f and %3.1f'
             % (len(newdata), minval, degorkm, maxval, degorkm, latitude,
                longitude, after, before, bigger, smaller))
    return self.__class__(newdata[::-1])
def locating(guess, *args):
    # from obspy.core.util import gps2DistAzimuth
    # gps2DistAzimuth(lat1, lon1, lat2, lon2) returns (great circle
    # distance in m, azimuth A->B in degrees, azimuth B->A in degrees).
    # Same misfit as residuals_minimum, but with the signature expected
    # by scipy.optimize.fmin: the station terms come in via *args.
    L = len(args)
    residuals = 0
    for i in range(L):
        residuals += (gps2DistAzimuth(guess[1], guess[0],
                                      args[i][2], args[i][1])[0]
                      / 1000 * 180 / (np.pi * 6371) - args[i][4]) ** 2
    return np.sqrt(residuals / L)
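# A hedged end-to-end sketch of how locating() and residuals_minimum()
# are typically driven (the associator further below calls
# scipy.optimize.fmin the same way). Station tuples follow the
# (sta, lon, lat, d_km, delta, index) layout used by the 1D associator;
# the values here are made up for illustration.
import numpy as np
from scipy.optimize import fmin

stations = (('STA1', 10.0, 45.0, 111.0, 1.0, 0),
            ('STA2', 11.0, 46.0, 111.0, 1.0, 1),
            ('STA3', 10.0, 47.0, 111.0, 1.0, 2))

best = fmin(locating, [10.5, 45.5], args=stations, disp=0)
print('epicenter estimate: lon=%.2f lat=%.2f' % (best[0], best[1]))
print('RMS residual: %.3f deg' % residuals_minimum(best, stations))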
def coords2azbazinc(stream, origin):
    """
    Returns azimuth, backazimuth and incidence angle from station
    coordinates given in the first trace of the stream and from the event
    location specified in the origin object.
    """
    sta_coords = stream[0].stats.coordinates
    dist, bazim, azim = gps2DistAzimuth(sta_coords.latitude,
                                        sta_coords.longitude,
                                        float(origin.latitude),
                                        float(origin.longitude))
    elev_diff = sta_coords.elevation - float(origin.depth)
    inci = math.atan2(dist, elev_diff) * 180.0 / math.pi
    return azim, bazim, inci
def migrate(stream, stations, lats, lons, velocity, skip=0, normalize=True,
            station_splitter='-'):
    """
    Migrate reflections of autocorrelations and cross-correlations to the
    surface.

    Loop over stream, lons and lats and check that
    (dist_st / velocity + skip) * sr
        < (dist1 + dist2) / velocity * sr < tr.stats.npts.
    """
    ret = np.zeros((len(lons), len(lats)))
    sr = stream[0].stats.sampling_rate
    for l, tr in enumerate(stream):
        st1, st2 = tr.stats.station.split(station_splitter)
        st1, st2 = st1[:-1], st2[:-1]
        dist_st = gps2DistAzimuth(stations[st1].latitude,
                                  stations[st1].longitude,
                                  stations[st2].latitude,
                                  stations[st2].longitude)[0] / 1000.
        i0 = int(round((dist_st / velocity + skip) * sr))
        for x, lon in enumerate(lons):
            if x % 10 == 0:
                sys.stdout.write('Progress[%.2f%%]\r'
                                 % (100. * (x + 1) / len(lons) *
                                    (l + 1) / len(stream)))
                sys.stdout.flush()
            for y, lat in enumerate(lats):
                dist1 = gps2DistAzimuth(stations[st1].latitude,
                                        stations[st1].longitude,
                                        lat, lon)[0] / 1000.
                if st1 == st2:
                    dist2 = dist1
                else:
                    dist2 = gps2DistAzimuth(stations[st2].latitude,
                                            stations[st2].longitude,
                                            lat, lon)[0] / 1000.
                time = (dist1 + dist2) / velocity
                i = int(round(time * sr))
                if i0 < i < tr.stats.npts:
                    to_add = tr.data[i]
                    if normalize:
                        to_add /= np.max(np.abs(tr.data[i0:]))
                    ret[x, y] += to_add
        gc.collect(2)
    return ret
def test_util_gps2dist(self):
    """
    Tests gps2dist against seispy.
    """
    try:
        import seis.geo
        from obspy.core.util import gps2DistAzimuth
    except ImportError:
        pass
    else:
        dist_deg = sito.util.gps2DistDegree(10, 20, 30, 40)
        dist_km, az1, az2 = gps2DistAzimuth(10, 20, 30, 40)  # @UnusedVariable
        dist_deg_seis, az1_seis, az2_seis = seis.geo.delazi(10, 20, 30, 40)
        # print util.gps2dist(10, 20, 30, 40)
        # print seis.geo.delazi(10, 20, 30, 40)
        self.assertEqual(abs(dist_deg - dist_deg_seis) < 1e-5, True)
        # One of the two routines is not working exactly:
        self.assertEqual(abs(az1 - az1_seis) < 0.2, True)
        self.assertEqual(abs(az2 - az2_seis) < 0.2, True)
def azimuth(x0, y0, x1, y1):
    """
    Returns the azimuth between two coordinate sets.

    :type x0: float
    :param x0: X coordinate of station 1
    :type y0: float
    :param y0: Y coordinate of station 1
    :type x1: float
    :param x1: X coordinate of station 2
    :type y1: float
    :param y1: Y coordinate of station 2

    :rtype: float
    :returns: The azimuth in degrees
    """
    dist, azim, bazim = gps2DistAzimuth(y0, x0, y1, x1)
    return azim
def gps2dist(lat1, lon1, lat2, lon2):
    """
    Return distance in degree, in km, azimuth 1 and azimuth 2.

    Arguments:
    lat1: Latitude of point A in degrees (positive for northern,
        negative for southern hemisphere)
    lon1: Longitude of point A in degrees (positive for eastern,
        negative for western hemisphere)
    lat2: Latitude of point B in degrees (positive for northern,
        negative for southern hemisphere)
    lon2: Longitude of point B in degrees (positive for eastern,
        negative for western hemisphere)
    return: (Great circle distance in deg, in km, azimuth A->B in degrees,
        azimuth B->A in degrees)
    """
    distm, az1, az2 = gps2DistAzimuth(lat1, lon1, lat2, lon2)
    distdeg = arccosd(sind(lat1) * sind(lat2) +
                      cosd(lat1) * cosd(lat2) * cosd(lon1 - lon2))
    return distdeg, distm / 1000., az1, az2
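# A short sanity check for gps2dist, assuming degree-based trig helpers
# (sind, cosd, arccosd) as used above; they are defined here explicitly so
# the sketch is self-contained. Along a meridian at the equator, 1 degree
# of arc is about 110.6 km on the WGS84 ellipsoid.
import numpy as np
try:
    from obspy.core.util import gps2DistAzimuth
except ImportError:
    from obspy.geodetics import gps2dist_azimuth as gps2DistAzimuth

def sind(x):
    return np.sin(np.radians(x))

def cosd(x):
    return np.cos(np.radians(x))

def arccosd(x):
    return np.degrees(np.arccos(np.clip(x, -1.0, 1.0)))

distdeg, distkm, az1, az2 = gps2dist(0.0, 0.0, 1.0, 0.0)
print('%.3f deg, %.1f km' % (distdeg, distkm))  # ~1.000 deg, ~110.6 km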
def parser(fn, trueot=None, trueloc=None, format='old'):
    fh = open(fn)
    contents = []
    # skip header lines
    fh.readline()
    fh.readline()
    for l in fh.readlines():
        l = l.rstrip()
        a = l.split('|')
        mag = float(a[0])
        lat = float(a[1])
        lon = float(a[2])
        if format == 'old':
            dep = float(a[3])
            ct = UTCDateTime(a[4])
            ot = UTCDateTime(a[5])
            likeh = float(a[-1])
        else:
            dep = float(a[4])
            ct = UTCDateTime(a[5])
            ot = UTCDateTime(a[6])
            likeh = float(a[7])
        if trueot is None:
            tdiff = float(ct - ot)
        else:
            tdiff = float(ct - trueot)
        if trueloc is None:
            ddiff = 0
        else:
            dist, az, baz = gps2DistAzimuth(lat, lon, *trueloc)
            ddiff = dist / 1000.
        if format != 'old':
            nstorig = int(a[-2])
            nstmag = int(a[-1])
            temp = [mag, lat, lon, dep, ct, ot, tdiff, ddiff, likeh,
                    nstorig, nstmag]
        else:
            temp = [mag, lat, lon, dep, ct, ot, tdiff, ddiff, likeh]
        contents.append(temp)
    return np.array(contents)
def eventPicker(data, component='all', phase='P', window=(-100, 400),
                filter=(None, None), new_sampling_rate=100, write=True,  # @ReservedAssignment
                **kwargs):
    """
    Pick window around onset of events from mseed files.

    The resulting stream is written in separate files for each station
    and year.

    :param data: data object with stations property and getRawStream,
        writeRFEvents methods
    :param events: file with events, Events object or None (in this case
        kwargs have to be defined) - passed to _getEvents
    :param component: 'Z', 'N', 'E' or 'all'
    :param phase: which onset is used? 'P', 'PP' or 'S' or something else;
        consider that events must show this phase for the stations
    :param window: window around onset in seconds
    :param filter: filter stream between these frequencies
    :param new_sampling_rate: downsample stream to this sampling rate
    :param write: if True everything is written to files,
        if False a stream object is returned
    :kwargs: passed to _getEvents - in the end they are passed to the
        events.Events.load function if param events == None
    """
    log.info('Start event picker: %s' % util.parameters())
    try:
        log.info('Data used %s' % data.raw)
    except Exception:
        log.info('Data regex used %s' % data.raw_regex)
    log.info('Extracted data for events will be saved in %s'
             % data.rf_events)
    if data.events is None and len(kwargs) == 0:
        raise Exception('No arguments to determine events!')
    failure_list = []
    if write:
        stream_return = None
    else:
        stream_return = Stream()
    stations = data.stations
    all_events = _getEvents(data.events, **kwargs)
    all_events.sort()
    log.info('Events between %s and %s' % (all_events[0].datetime.date,
                                           all_events[-1].datetime.date))
    first_year = all_events[0].datetime.year
    last_year = all_events[-1].datetime.year
    for station_name, station in stations.items():
        for year in range(first_year, last_year + 1):
            events = all_events.pick(after='%s-1-1' % year,
                                     before='%s-1-1' % (year + 1),
                                     replace=False)
            stream_year = Stream()
            for event in events:
                dist = util.gps2DistDegree(station.latitude,
                                           station.longitude,
                                           event.latitude,
                                           event.longitude)
                baz = gps2DistAzimuth(station.latitude, station.longitude,
                                      event.latitude, event.longitude)[1]
                arrival = util.ttt(dist, event.depth).findPhase(phase)
                if arrival is None:
                    log.warning('Phase %s not present at distance %s '
                                'depth %s' % (phase, dist, event.depth))
                    arrival = util.ttt(dist, event.depth)[0]
                onset = event.datetime + arrival.time
                t1 = onset + window[0]
                t2 = onset + window[1]
                try:
                    stream = data.getRawStream(t1, station_name,
                                               component, t2)
                except Exception as ex:
                    failure_list.append((station_name, event.id, str(ex)))
                    continue
                # Write header entries and basic data processing
                # (filtering, downsampling).
                stats = AttribDict({'event': event,
                                    'station': station_name,
                                    'dist': dist,
                                    'azi': baz,
                                    'inci': arrival.inci,
                                    phase.lower() + 'onset': onset,
                                    'slowness': arrival.slow,
                                    'filter': ''})
                for trace in stream:
                    trace.stats.update(stats)
                stream_year.extend(stream)
            if len(stream_year) > 0:
                stream_year.demean()
                stream_year.detrend()
                if filter[0] is not None or filter[1] is not None:
                    stream_year.filter2(freqmin=filter[0],
                                        freqmax=filter[1])
                if new_sampling_rate <= (
                        max(stream_year.getHI('sampling_rate')) / 2.):
                    stream_year.downsample2(new_sampling_rate)
                if write:
                    data.writeRFEvents(stream_year, station_name,
                                       event.datetime)
                else:
                    stream_return.extend(stream_year)
    if len(failure_list) > 0:
        log.warning('Failed to load the data for:\nstation event.id '
                    'reason\n' + '\n'.join([' '.join(entry)
                                            for entry in failure_list]))
    if write:
        return failure_list
    else:
        return stream_return, failure_list
def alert_times_map(self, fns, m=None, fig=None, ax=None, scale=10000.,
                    cb=True, disterr=False, interactive=False,
                    eventinfo=None, msscale=1, cmapname='jet'):
    """
    Plot a map of observed alert times.
    """
    cmap = cm.ScalarMappable(norm=Normalize(vmin=6, vmax=25),
                             cmap=cmapname)
    rp = ReportsParser(dmin=UTCDateTime(2012, 1, 1, 0, 0, 0),
                       dmax=UTCDateTime(2013, 11, 1, 0, 0, 0))
    t = EventCA()
    rp.sfilter = t.point_in_polygon
    for _f in fns:
        rp.read_reports(_f)
    correct = rp.get_correct(mmin=3.5, mmax=10.0)
    pid = correct[:, 0]
    ot = correct[:, 2].astype('float')
    lats = correct[:, 3].astype('float')
    lons = correct[:, 4].astype('float')
    mags = correct[:, 6].astype('float')
    ts1 = correct[:, 7].astype('float')
    lats1 = correct[:, 9].astype('float')
    lons1 = correct[:, 10].astype('float')
    mags1 = correct[:, 12].astype('float')
    rfns = correct[:, 21]
    diff = ts1 - ot
    magdiff = mags - mags1
    if m is None and fig is None and ax is None:
        fig = plt.figure()
        ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
        m = self.background_map(ax)
    dataX = []
    dataY = []
    values = []
    # load event info
    cnt = 0
    allcnt = 0
    for lon, lat, delay, evid, lat1, lon1, dmag, time, mag, rfn in \
            zip(lons, lats, diff, pid, lats1, lons1, magdiff, ot, mags,
                rfns):
        allcnt += 1
        try:
            if eventinfo is not None and len(eventinfo[evid]) != 4:
                print("Event %s does not have 4 initial picks." % evid)
                continue
        except KeyError:
            print("No event information available for: %s (%s)"
                  % (evid, UTCDateTime(time)))
            continue
        if evid in self.event_excludes:
            print("Event %s was set to be excluded." % evid)
            continue
        cnt += 1
        ddist, az, baz = gps2DistAzimuth(lat, lon, lat1, lon1)
        ddist /= 1000.
        x, y = m(lon, lat)
        dataX.append(x)
        dataY.append(y)
        info = '%s: %.2f %.2f %s' % (UTCDateTime(time), delay, mag, evid)
        for _st in eventinfo[evid]:
            info += ' %s' % _st
        values.append(info)
        cl = cmap.to_rgba(delay)
        if disterr:
            factor = math.sqrt(abs(float(ddist)))
            sl2 = scale * factor
            p2 = Wedge((x, y), sl2, 0, 360, facecolor=cl,
                       edgecolor='black', picker=5, lw=1.0)
            ax.add_patch(p2)
        else:
            m.plot(x, y, ms=8 * msscale, c=cl, marker='o', picker=5.)
    print("Plotted %d out of %d events." % (cnt, allcnt))
    if interactive:
        self.popup(fig, dataX, dataY, values)
    if cb:
        # Colorbar
        cax = fig.add_axes([0.87, 0.1, 0.05, 0.8])
        cb = ColorbarBase(cax, cmap=cmapname,
                          norm=Normalize(vmin=6., vmax=25.))
        cb.set_label('Time since origin time [s]')
def _validate_and_write_waveforms(st, callback, starttime, endtime, scale,
                                  source, receiver, db, label, format):
    if not label:
        label = ""
    else:
        label += "_"

    for tr in st:
        # Half the filesize but definitely sufficiently accurate.
        tr.data = np.require(tr.data, dtype=np.float32)

    if scale != 1.0:
        for tr in st:
            tr.data *= scale

    # Sanity checks. Raise internal server errors in case something fails.
    # This should not happen and should have been caught before.
    if endtime > st[0].stats.endtime:
        msg = ("Endtime larger than the extracted endtime: endtime=%s, "
               "largest db endtime=%s" % (
                   _format_utc_datetime(endtime),
                   _format_utc_datetime(st[0].stats.endtime)))
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return
    if starttime < st[0].stats.starttime - 3600.0:
        msg = ("Starttime more than one hour before the starttime of the "
               "seismograms.")
        callback((tornado.web.HTTPError(500, log_message=msg, reason=msg),
                  None))
        return

    if isinstance(source, FiniteSource):
        mu = None
    else:
        mu = st[0].stats.instaseis.mu

    # Trim, potentially pad with zeroes.
    st.trim(starttime, endtime, pad=True, fill_value=0.0,
            nearest_sample=False)

    # Checked in another function and just a sanity check.
    assert format in ("miniseed", "saczip")

    if format == "miniseed":
        with io.BytesIO() as fh:
            st.write(fh, format="mseed")
            fh.seek(0, 0)
            binary_data = fh.read()
        callback((binary_data, mu))
    # Write a number of SAC files into an archive.
    elif format == "saczip":
        byte_strings = []
        for tr in st:
            # Write SAC headers.
            tr.stats.sac = obspy.core.AttribDict()
            # Write WGS84 coordinates to the SAC files.
            tr.stats.sac.stla = geocentric_to_elliptic_latitude(
                receiver.latitude)
            tr.stats.sac.stlo = receiver.longitude
            tr.stats.sac.stdp = receiver.depth_in_m
            tr.stats.sac.stel = 0.0
            if isinstance(source, FiniteSource):
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.hypocenter_latitude)
                tr.stats.sac.evlo = source.hypocenter_longitude
                tr.stats.sac.evdp = source.hypocenter_depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.hypocenter_latitude
                src_lng = source.hypocenter_longitude
            else:
                tr.stats.sac.evla = geocentric_to_elliptic_latitude(
                    source.latitude)
                tr.stats.sac.evlo = source.longitude
                tr.stats.sac.evdp = source.depth_in_m
                # Force source has no magnitude.
                if not isinstance(source, ForceSource):
                    tr.stats.sac.mag = source.moment_magnitude
                src_lat = source.latitude
                src_lng = source.longitude
            # That's what SPECFEM uses for a moment magnitude....
            tr.stats.sac.imagtyp = 55
            # The event origin time relative to the reference which I'll
            # just assume to be the starttime here?
            tr.stats.sac.o = source.origin_time - starttime
            # SAC coordinates are elliptical, thus it only makes sense to
            # have elliptical distances.
            dist_in_m, az, baz = gps2DistAzimuth(
                lat1=tr.stats.sac.evla,
                lon1=tr.stats.sac.evlo,
                lat2=tr.stats.sac.stla,
                lon2=tr.stats.sac.stlo)
            tr.stats.sac.dist = dist_in_m / 1000.0
            tr.stats.sac.az = az
            tr.stats.sac.baz = baz
            # XXX: Is this correct? Maybe better use some function in
            # geographiclib?
            tr.stats.sac.gcarc = locations2degrees(
                lat1=src_lat, long1=src_lng,
                lat2=receiver.latitude, long2=receiver.longitude)
            # Some provenance.
            tr.stats.sac.kuser0 = "InstSeis"
            tr.stats.sac.kuser1 = db.info.velocity_model[:8]
            tr.stats.sac.user0 = scale
            # Prefix version numbers to identify them at a glance.
            tr.stats.sac.kt7 = "A" + db.info.axisem_version[:7]
            tr.stats.sac.kt8 = "I" + __version__[:7]

            with io.BytesIO() as temp:
                tr.write(temp, format="sac")
                temp.seek(0, 0)
                filename = "%s%s.sac" % (label, tr.id)
                byte_strings.append((filename, temp.read()))
        callback((byte_strings, mu))
for i in range(len(ls_T_tmp)):
    os.remove(ls_T_tmp[i])
print("DONE")

for i in range(len(ls_E)):
    try:
        st_E = read(ls_E[i], format='SAC')[0]
        st_N = read(os.path.join(add_ev, 'BH',
                                 'dis.%s.%s.%sN'
                                 % (st_E.stats.station,
                                    st_E.stats.location,
                                    st_E.stats.channel[:-1])),
                    format='SAC')[0]
        (dist, azi, bazi) = gps2DistAzimuth(st_E.stats.sac.evla,
                                            st_E.stats.sac.evlo,
                                            st_E.stats.sac.stla,
                                            st_E.stats.sac.stlo)
        (tr_data_R, tr_data_T) = rotate.rotate_NE_RT(st_N.data, st_E.data,
                                                     bazi)
        tr_R = st_N.copy()
        tr_T = st_N.copy()
        tr_R.data = tr_data_R
        tr_R.stats.channel = 'BHR'
        tr_T.data = tr_data_T
        tr_T.stats.channel = 'BHT'
        # tr_R.write(os.path.join(add_ev, 'BH',
        #                         'dis.%s.%s.%s'
from obspy.core import read, Stream
from obspy.core.util import gps2DistAzimuth

host = 'http://examples.obspy.org/'
# Files (fmt: SAC)
files = ['TOK.2011.328.21.10.54.OKR01.HHN.inv',
         'TOK.2011.328.21.10.54.OKR02.HHN.inv',
         'TOK.2011.328.21.10.54.OKR03.HHN.inv',
         'TOK.2011.328.21.10.54.OKR04.HHN.inv',
         'TOK.2011.328.21.10.54.OKR05.HHN.inv',
         'TOK.2011.328.21.10.54.OKR06.HHN.inv',
         'TOK.2011.328.21.10.54.OKR07.HHN.inv',
         'TOK.2011.328.21.10.54.OKR08.HHN.inv',
         'TOK.2011.328.21.10.54.OKR09.HHN.inv',
         'TOK.2011.328.21.10.54.OKR10.HHN.inv']

# Earthquake's epicenter
eq_lat = 35.565
eq_lon = -96.792

# Reading the waveforms
st = Stream()
for waveform in files:
    st += read(host + waveform)

# Calculating distance from SAC headers lat/lon
# (trace.stats.sac.stla and trace.stats.sac.stlo)
for tr in st:
    tr.stats.distance = gps2DistAzimuth(tr.stats.sac.stla,
                                        tr.stats.sac.stlo,
                                        eq_lat, eq_lon)[0]
    # Setting Network name for plot title
    tr.stats.network = 'TOK'

st.filter('bandpass', freqmin=0.1, freqmax=10)

# Plot
st.plot(type='section', plot_dx=20e3, recordlength=100, time_down=True,
        linewidth=.25, grid_linewidth=.25)
def residual(location, args):
    # Distance residual for one station, converted from meters to degrees.
    x = (gps2DistAzimuth(location[1], location[0], args[2], args[1])[0]
         / 1000 * 180 / (np.pi * 6371) - args[4])
    return x
def associate_candidates(self):
    """
    Associate all possible candidate events by comparing the projected
    origin times.
    """
    # now2 = time.time()
    dt_ot = timedelta(seconds=self.assoc_ot_uncert)

    # Query all candidate origin times
    candidate_ots = self.assoc_db.query(Candidate).\
        filter(Candidate.assoc_id == None).order_by(Candidate.ot).all()
    L_ots = len(candidate_ots)
    Array = []
    for i in range(L_ots):
        cluster = self.assoc_db.query(Candidate).\
            filter(Candidate.assoc_id == None).\
            filter(Candidate.ot >= candidate_ots[i].ot).\
            filter(Candidate.ot < (candidate_ots[i].ot + dt_ot)).\
            order_by(Candidate.ot).all()
        cluster_sta = self.assoc_db.query(Candidate.sta).\
            filter(Candidate.assoc_id == None).\
            filter(Candidate.ot >= candidate_ots[i].ot).\
            filter(Candidate.ot < (candidate_ots[i].ot + dt_ot)).\
            order_by(Candidate.ot).all()
        l_cluster = len(set(cluster_sta))
        Array.append((i, l_cluster, len(cluster)))
    # Sort Array by l_cluster; notice Array has been changed.
    Array.sort(key=itemgetter(1), reverse=True)
    # print 'cluster analysis time:', time.time() - now2, 's'

    for i in range(len(Array)):
        index = Array[i][0]
        if Array[i][1] >= self.nsta_declare:
            candis = self.assoc_db.query(Candidate).\
                filter(Candidate.assoc_id == None).\
                filter(Candidate.ot >= candidate_ots[index].ot).\
                filter(Candidate.ot <
                       (candidate_ots[index].ot + dt_ot)).\
                order_by(Candidate.ot).all()

            # Remove candidates whose modified picks have already been
            # associated.
            picks_associated_id = list(set(
                self.assoc_db.query(PickModified.id).
                filter(PickModified.assoc_id != None).all()))
            index_candis = []
            for id, in picks_associated_id:
                for j, candi in enumerate(candis):
                    if candi.p_modified_id == id or \
                            candi.s_modified_id == id:
                        index_candis.append(j)
            # Delete from the end.
            if index_candis:
                for j in sorted(set(index_candis), reverse=True):
                    del candis[j]

            # 1D Associator: store all necessary parameters in lists.
            radius = []
            for j, candi in enumerate(candis):
                # Pass in the radius for map plotting.
                lon, lat = self.tt_stations_db_1D.query(
                    Station1D.longitude, Station1D.latitude).\
                    filter(Station1D.sta == candi.sta).first()
                radius.append((candi.sta, lon, lat, candi.d_km,
                               candi.delta, j))
            cb = self.comb(radius)

            rms_sort = []
            for j in range(len(cb)):
                radius_cb = cb[j]
                # self.nsta_declare has to be greater than or equal to 3.
                if len(radius_cb) >= self.nsta_declare:
                    # disp=1 would print convergence messages.
                    location = fmin(locating, [lon, lat], radius_cb,
                                    disp=0)
                    residual_minimum = residuals_minimum(location,
                                                         radius_cb)
                    rms_sort.append((location, residual_minimum, j))

            # It is possible to have an empty rms_sort.
            if rms_sort:
                rms_sort.sort(key=itemgetter(1))
                # loc is the location before the outlier cutoff.
                loc, rms, index = rms_sort[0]
                lon = loc[0]
                lat = loc[1]
                # matches is one combination from
                # radius.append((candi.sta, lon, lat, candi.d_km,
                #                candi.delta, j))
                matches = cb[index]

                # Cut off outliers. MATCHES_nol holds the matches without
                # outliers; MISMATCHES collects the outliers, which are
                # not used for locating.
                MISMATCHES = []
                MATCHES_nol, mismatches = outlier_cutoff(
                    matches, loc, self.cutoff_outlier)
                if mismatches:
                    MISMATCHES.append(mismatches[0])
                while mismatches:
                    loc = fmin(locating, [lon, lat], MATCHES_nol, disp=0)
                    MATCHES_nol, mismatches = outlier_cutoff(
                        MATCHES_nol, loc, self.cutoff_outlier)
                    if mismatches:
                        MISMATCHES.append(mismatches[0])

                # Declare an event when nsta and RMS are under control.
                nsta = len(MATCHES_nol)
                if nsta >= self.nsta_declare:
                    LOC = fmin(locating, (lon, lat), MATCHES_nol, disp=0)
                    LON = round(LOC[0], 3)
                    LAT = round(LOC[1], 3)
                    OTS = []
                    for k in range(nsta):
                        OTS.append(candis[MATCHES_nol[k][5]].ot)
                    origintime, ot_unc = datetime_statistics(OTS)
                    RMS = residuals_minimum(LOC, MATCHES_nol)
                    t_create = datetime.utcnow()
                    t_update = datetime.utcnow()
                    if RMS <= self.loc_uncert_thresh:
                        new_event = Associated(origintime,
                                               round(ot_unc, 3), LAT,
                                               LON, round(RMS, 3), nsta,
                                               t_create, t_update)
                        self.assoc_db.add(new_event)
                        self.assoc_db.flush()
                        self.assoc_db.refresh(new_event)
                        self.assoc_db.commit()
                        event_id = new_event.id
                        print('event_id: %s' % event_id)
                        print('ot: %s ot_uncert: %.3f loc: %.3f %.3f '
                              'loc_uncert: %.3f nsta: %d'
                              % (origintime, ot_unc, LAT, LON, RMS,
                                 nsta))

                        # Associate candidates and picks with the
                        # identified event.
                        for candi in MATCHES_nol:
                            candis[candi[5]].set_assoc_id(
                                event_id, self.assoc_db, True)
                        self.assoc_db.commit()

                        # Associate candidates from the outliers if their
                        # d_km intersects the location uncertainty.
                        if MISMATCHES:
                            for k in range(len(MISMATCHES)):
                                d = gps2DistAzimuth(
                                    LAT, LON, MISMATCHES[k][2],
                                    MISMATCHES[k][1])[0] / 1000
                                r = MISMATCHES[k][3]
                                uncert_km = RMS * np.pi / 180.0 * 6371
                                if abs(d - r) <= uncert_km:
                                    candis[MISMATCHES[k][5]].\
                                        set_assoc_id(event_id,
                                                     self.assoc_db,
                                                     False)
                            self.assoc_db.commit()
        else:
            break
def PyNASTF(**kwargs):
    """
    PyNASTF: Python Neighbourhood Algorithm STF
    """
    # ----------------------------- input handler -------------------------
    config = ConfigParser.RawConfigParser()
    inputpath = 'in.na.cfg'

    class input_handler:
        def __init__(self, inputpath):
            self.inpath = inputpath
            self.config = config.read(os.path.join(os.getcwd(),
                                                   self.inpath))
            self.event_address = config.get('General', 'event_address')
            self.remote_address = config.get('General', 'remote_address')
            self.network = config.get('General', 'network')
            self.station = config.get('General', 'station')
            self.location = config.get('General', 'location')
            self.channel = config.get('General', 'channel')
            self.filter = eval(config.get('General', 'filter'))
            self.lfreq = float(config.get('General', 'lfreq'))
            self.hfreq = float(config.get('General', 'hfreq'))
            self.resample = eval(config.get('General', 'resample'))
            self.sampling_rate = int(config.get('General',
                                                'sampling_rate'))
            self.min_dist = eval(config.get('General', 'min_dist'))
            self.max_dist = eval(config.get('General', 'max_dist'))
            self.bg_model = config.get('General', 'bg_model')
            self.SNR_limit = eval(config.get('General', 'SNR_limit'))
            self.plot_ph_no = eval(config.get('General',
                                              'plot_phase_noise'))
            self.map = eval(config.get('General', 'map'))
            self.plot_azi = eval(config.get('General', 'plot_azi'))

    # create the input class
    inp = input_handler(inputpath)

    # Modify the input objects so they are usable in the next steps.
    inp.network = inp.network.split(',')
    for _i in range(len(inp.network)):
        inp.network[_i] = inp.network[_i].strip()
    inp.channel = inp.channel.split(',')
    for _i in range(len(inp.channel)):
        inp.channel[_i] = inp.channel[_i].strip()

    # s_tb: signal time before, s_ta: signal time after
    # n_tb: noise time before, n_ta: noise time after
    s_tb = -3
    s_ta = 9
    n_tb = -150
    n_ta = -30

    ev_name, ev_lat, ev_lon, ev_dp, ev_date = \
        pdata_reader(address=inp.event_address,
                     remote_address=inp.remote_address)
    for _i in range(len(ev_name)):
        ev_name[_i] = os.path.join(ev_name[_i], 'BH')

    for ev_enum in range(len(ev_name)):
        e_add = ev_name[ev_enum]
        print('\n===========')
        print('Event %s/%s: \n%s' % (ev_enum + 1, len(ev_name), e_add))
        print('===========')
        if not os.path.isdir(os.path.join(e_add.split('/')[-2],
                                          'infiles')):
            os.makedirs(os.path.join(e_add.split('/')[-2], 'infiles'))
        metadata = []
        msg_header = 'Event information; Lat, Lon, Depth\n'
        msg_header += '%.6f %.6f %.6f\n' % (ev_lat[ev_enum],
                                            ev_lon[ev_enum],
                                            ev_dp[ev_enum])
        msg_p = 'P-wave data ' + 17 * '*' + '\n'
        msg_sh = 'SH-wave data ' + 17 * '*' + '\n'
        all_p_data = []
        all_sh_data = []
        all_sta_add = glob.glob(os.path.join(e_add, '*.*.*.*'))
        all_sta_add.sort()
        print(len(all_sta_add))
        for sta_add in all_sta_add:
            sys.stdout.write('.')
            try:
                tr = read(sta_add)[0]
            except Exception as e:
                print(e)
                continue
            if not inp.network == ['*']:
                if tr.stats.network not in inp.network:
                    continue
            if tr.stats.channel not in inp.channel:
                continue
            # epi_dist_prev = locations2degrees(
            #     tr.stats.sac.evla, tr.stats.sac.evlo,
            #     tr.stats.sac.stla, tr.stats.sac.stlo)
            epi_km = gps2DistAzimuth(tr.stats.sac.evla,
                                     tr.stats.sac.evlo,
                                     tr.stats.sac.stla,
                                     tr.stats.sac.stlo)[0]
            epi_dist = kilometer2degrees(epi_km / 1000.)
            # XXX for testing!
            # epi_dist = tr.stats.sac.gcarc
            if not inp.min_dist <= epi_dist <= inp.max_dist:
                continue
            if 'Z' in tr.stats.channel:
                tr_tw = time_window(tr, model=inp.bg_model)
                ph_arr = tr_tw.arr_time(epi_dist, req_phase='P')
                # XXX for testing
                # ph_arr = tr.stats.sac.t0
                if ph_arr == -12345.0:
                    continue
                tr = preproc(tr, filter=inp.filter, hfreq=inp.hfreq,
                             lfreq=inp.lfreq, resample=inp.resample,
                             sampling_rate=inp.sampling_rate)
                SNR, l1_noise, l2_noise, p_data, flag_exist = \
                    SNR_calculator(tr, ev_date[ev_enum], ph_arr,
                                   s_tb=s_tb, s_ta=s_ta, n_tb=n_tb,
                                   n_ta=n_ta, method='squared',
                                   plot_ph_no=inp.plot_ph_no,
                                   address=os.path.join(
                                       e_add.split('/')[-2], 'infiles'))
                if not flag_exist:
                    continue
                if SNR < inp.SNR_limit:
                    continue
                innastats_str = '%s\n%.6f %.6f %.6f\n%.6f %.6f %.6f\n' \
                    % (tr.stats.station, tr.stats.sac.stla,
                       tr.stats.sac.stlo, ph_arr + s_tb, SNR, l1_noise,
                       l2_noise)
                az, ba = azbackaz(tr)
                all_p_data.append([tr.stats.station, tr.stats.location,
                                   SNR, az, p_data, innastats_str])
            if 'N' in tr.stats.channel:
                try:
                    tr_E = read(sta_add[:-1] + 'E')[0]
                except Exception as e:
                    print('Cannot read: \n%s' % (sta_add[:-1] + 'E'))
                    continue
                tr = preproc(tr, filter=inp.filter, hfreq=inp.hfreq,
                             lfreq=inp.lfreq, resample=inp.resample,
                             sampling_rate=inp.sampling_rate)
                tr_E = preproc(tr_E, filter=inp.filter, hfreq=inp.hfreq,
                               lfreq=inp.lfreq, resample=inp.resample,
                               sampling_rate=inp.sampling_rate)
                tr_sh = tr.copy()
                az, tr_sh.data = rotater(tr, tr_E)
                if not az:
                    continue
                tr_tw = time_window(tr_sh, model=inp.bg_model)
                ph_arr = tr_tw.arr_time(epi_dist, req_phase='S')
                if ph_arr == -12345.0:
                    continue
                SNR, l1_noise, l2_noise, sh_data, flag_exist = \
                    SNR_calculator(tr_sh, ev_date[ev_enum], ph_arr,
                                   s_tb=s_tb, s_ta=s_ta, n_tb=n_tb,
                                   n_ta=n_ta, method='squared',
                                   plot_ph_no=inp.plot_ph_no,
                                   address=os.path.join(
                                       e_add.split('/')[-2], 'infiles'))
                if not flag_exist:
                    continue
                if SNR < inp.SNR_limit:
                    continue
                innastats_str = '%s\n%.6f %.6f %.6f\n%.6f %.6f %.6f\n' \
                    % (tr_sh.stats.station, tr_sh.stats.sac.stla,
                       tr_sh.stats.sac.stlo, ph_arr + s_tb, SNR,
                       l1_noise, l2_noise)
                all_sh_data.append([tr_sh.stats.station,
                                    tr_sh.stats.location, SNR, az,
                                    sh_data, innastats_str])
print('%s total number of events' % len(pdata_events))
fio_selected_events = open(os.path.join('.', 'results',
                                        'selected_events.txt'), 'w')
m2degree = 360. / (2. * np.pi * 6371000.)
enum = 0
for i in range(len(pdata_events)):
    if not min_mag <= float(pdata_events[i].split(',')[4]) <= max_mag:
        continue
    ev_lat = float(pdata_events[i].split(',')[1])
    ev_lon = float(pdata_events[i].split(',')[2])
    if not min_lat <= ev_lat <= max_lat:
        continue
    if not min_lon <= ev_lon <= max_lon:
        continue
    cent_ev_gd = gps2DistAzimuth(center_lat, center_lon, ev_lat, ev_lon)
    if not abs(cent_ev_gd[1] - req_azimuth) <= azimuth_error:
        continue
    if not min_date <= \
            int(pdata_events[i].split(',')[0].split('.')[1]) <= max_date:
        continue
    dist = cent_ev_gd[0] * m2degree
    if not min_dist <= dist <= max_dist:
        continue
    fio_selected_events.writelines(pdata_events[i])
    print('-------------')
    print(pdata_events[i].split(',')[0])
    print('info:\nDistance: %s' % dist)
    print('Azimuth: %s' % cent_ev_gd[1])
    print(pdata_events[i])
    enum += 1
fid.write("% Start Time = " + time.strftime("%d %B %Y at %H:%M:%S") + "\n") print "Searching repeaters for station " + station_nm + " ..." for k in range(0,N): master = read( file_list[k] ) master.filter('bandpass', freqmin=low_f , freqmax=high_f, corners=n_poles, zerophase=True ) #master.filter('highpass', freq=low_f , corners=n_poles, zerophase=True ) kevnm_master = master[0].stats.sac.kevnm.rstrip() log_line = 'k = ' + str(k) + ' out of ' + str(N) print log_line if (k-1)%100 == 0: fid_log = open(log_file, 'a') fid_log.write(log_line + '\n') fid_log.close() if no_limit == False: eq_station = gps2DistAzimuth(master[0].stats.sac.evla, master[0].stats.sac.evlo, \ master[0].stats.sac.stla, master[0].stats.sac.stlo) if master[0].stats.sac.dist > eq_sta_dist_limit: continue for n in range(k+1, N): test = read( file_list[n] ) test.filter('bandpass', freqmin=low_f , freqmax=high_f, corners=n_poles, zerophase=True ) #test.filter('highpass', freq=low_f , corners=n_poles, zerophase=True ) eq_dist = 9999. # Declare to a large value if no_limit == False: eq_dist = gps2DistAzimuth(master[0].stats.sac.evla, master[0].stats.sac.evlo, \ test[0].stats.sac.evla, test[0].stats.sac.evlo )[0]/1000 if (eq_dist <= eq_distance_threshold) or (no_limit): if p_pick == 'fixed': master_times = [master[0].stats.sac.t6, master[0].stats.sac.t7, master[0].stats.sac.t8] test_times = [test[0].stats.sac.t6, test[0].stats.sac.t7, test[0].stats.sac.t8]
def section_plot(self, assoc_id, files, seconds_ahead=5,
                 record_length=100, channel='Z'):
    station = self.assoc_db.query(Candidate.sta).\
        filter(Candidate.assoc_id == assoc_id).all()
    sta_list = []
    for sta, in station:
        sta_list.append(str(sta))
    station_single = self.assoc_db.query(Pick.sta).\
        filter(Pick.assoc_id == assoc_id).\
        filter(Pick.locate_flag == None).all()
    for sta, in station_single:
        sta_list.append(str(sta))

    eve = self.assoc_db.query(Associated).\
        filter(Associated.id == assoc_id).first()
    # Earthquake's epicenter
    eq_lat = eve.latitude
    eq_lon = eve.longitude

    # Reading the waveforms
    ST = Stream()
    for file in files:
        st = read(file)
        ST += st

    # In case a seismometer uses channel codes like BH1, BH2 or BH3,
    # reassign the channel code:
    if channel == 'E' or channel == 'e':
        Chan = 'E1'
    elif channel == 'N' or channel == 'n':
        Chan = 'N2'
    elif channel == 'Z' or channel == 'z':
        Chan = 'Z3'
    else:
        print('Please input component E, e, N, n, Z, or z; '
              'the default is Z')

    # Calculating distance from headers lat/lon
    ST_new = Stream()
    for tr in ST:
        if tr.stats.channel[2] in Chan and tr.stats.station in sta_list:
            if tr.stats.starttime.datetime < eve.ot and \
                    tr.stats.endtime.datetime > eve.ot:
                tr.trim(UTCDateTime(eve.ot -
                                    timedelta(seconds=seconds_ahead)),
                        UTCDateTime(eve.ot +
                                    timedelta(seconds=record_length)))
                ST_new += tr

    # Remove duplicate stations, keeping the trace with more samples.
    while True:
        ST_new_sta = []
        for tr in ST_new:
            ST_new_sta.append(tr.stats.station)
        duplicate = list(set(
            [tr for tr in ST_new_sta if ST_new_sta.count(tr) > 1]))
        if not duplicate:
            break
        index = [i for (i, j) in enumerate(ST_new_sta)
                 if j == duplicate[-1]]
        i = 0
        while True:
            if ST_new[index[i]].stats.npts < \
                    ST_new[index[i + 1]].stats.npts:
                del ST_new[index[i]]
                break
            elif ST_new[index[i]].stats.npts >= \
                    ST_new[index[i + 1]].stats.npts:
                del ST_new[index[i + 1]]
                break

    ST_new.detrend('demean')
    # ST_new.filter('bandpass', freqmin=0.1, freqmax=100)

    factor = 10
    numRows = len(ST_new)
    segs = []
    ticklocs = []
    sta = []
    circle_x = []
    circle_y = []
    segs_picks = []
    ticklocs_picks = []
    for tr in ST_new:
        dmax = tr.data.max()
        dmin = tr.data.min()
        data = tr.data / (dmax - dmin) * factor
        # Due to floating point arithmetic issues, this cannot use
        # "t = np.arange(0, tr.stats.npts / tr.stats.sampling_rate,
        #                tr.stats.delta)".
        t = np.arange(0, round(tr.stats.npts / tr.stats.sampling_rate /
                               tr.stats.delta)) * tr.stats.delta
        segs.append(np.hstack((data[:, np.newaxis], t[:, np.newaxis])))
        lon, lat = self.tt_stations_db_3D.query(
            Station3D.longitude, Station3D.latitude).\
            filter(Station3D.sta == tr.stats.station).first()
        # gps2DistAzimuth returns meters; convert to km by /1000.
        distance = int(gps2DistAzimuth(lat, lon, eq_lat, eq_lon)[0] /
                       1000.)
        ticklocs.append(distance)
        sta.append(tr.stats.station)
        # DOT plot where picks were made. Notice that for the vertical
        # trace plot P is queried from the Pick table and S from the
        # PickModified table, while for horizontal trace plots both P and
        # S are queried from the PickModified table.
        if channel == 'Z3':
            picks_p = self.assoc_db.query(Pick.time).\
                filter(Pick.assoc_id == assoc_id).\
                filter(Pick.sta == tr.stats.station).\
                filter(Pick.chan == tr.stats.channel).\
                filter(Pick.phase == 'P').all()
            if not picks_p:
                picks_p = self.assoc_db.query(PickModified.time).\
                    filter(PickModified.assoc_id == assoc_id).\
                    filter(PickModified.sta == tr.stats.station).\
                    filter(PickModified.phase == 'P').all()
            picks_s = self.assoc_db.query(PickModified.time).\
                filter(PickModified.assoc_id == assoc_id).\
                filter(PickModified.sta == tr.stats.station).\
                filter(PickModified.phase == 'S').all()
        else:
            picks_p = self.assoc_db.query(PickModified.time).\
                filter(PickModified.assoc_id == assoc_id).\
                filter(PickModified.sta == tr.stats.station).\
                filter(PickModified.phase == 'P').all()
            picks_s = self.assoc_db.query(PickModified.time).\
                filter(PickModified.assoc_id == assoc_id).\
                filter(PickModified.sta == tr.stats.station).\
                filter(PickModified.phase == 'S').all()
        picks = picks_p + picks_s
        for pick, in picks:
            index = int((pick - eve.ot +
                         timedelta(seconds=seconds_ahead)).total_seconds()
                        / tr.stats.delta)
            circle_x.append(distance + data[index])
            circle_y.append(t[index])
            # BAR plot where picks were made
            t_picks = np.array([t[index], t[index]])
            data_picks = np.array([data.min(), data.max()])
            segs_picks.append(np.hstack((data_picks[:, np.newaxis],
                                         t_picks[:, np.newaxis])))
            ticklocs_picks.append(distance)

    tick_max = max(ticklocs)
    tick_min = min(ticklocs)
    offsets = np.zeros((numRows, 2), dtype=float)
    offsets[:, 0] = ticklocs
    offsets_picks = np.zeros((len(segs_picks), 2), dtype=float)
    offsets_picks[:, 0] = ticklocs_picks
    lines = LineCollection(segs, offsets=offsets, transOffset=None,
                           linewidths=.25, color='gray')
    lines_picks = LineCollection(segs_picks, offsets=offsets_picks,
                                 transOffset=None, linewidths=1,
                                 color='k')

    fig = plt.figure(figsize=(15, 8))
    ax1 = fig.add_subplot(111)
    # Gray dots indicate where the picks cross the waveforms.
    ax1.plot(circle_x, circle_y, 'o', c='gray')
    x0 = tick_min - (tick_max - tick_min) * 0.1
    x1 = tick_max + (tick_max - tick_min) * 0.1
    ylim(0, record_length)
    xlim(0, x1)
    ax1.add_collection(lines)
    ax1.add_collection(lines_picks)
    ax1.set_xticks(ticklocs)
    ax1.set_xticklabels(sta)
    ax1.invert_yaxis()
    ax1.xaxis.tick_top()
    plt.setp(plt.xticks()[1], rotation=45)
    xlabel('channel: ' + channel, fontsize=18)
    ylabel('Record Length (s)', fontsize=18)
    # plt.title('Section Plot of Event at %s' % tr.stats.starttime)
    plt.show()
# Check depth:
if (refevlist[i].dep < depmin) or (refevlist[i].dep > depmax):
    continue
# Check magnitude:
if (refevlist[i].mag < magmin) or (refevlist[i].mag > magmax):
    continue
# Check epicenter (box search):
if (searchmethod == 'box') and \
   ((refevlist[i].lat > latmax) or (refevlist[i].lat < latmin) or
        (refevlist[i].lon > lonmax) or (refevlist[i].lon < lonmin)):
    continue
# Check epicenter (circle search):
if searchmethod == 'cir':
    epid = gps2DistAzimuth(refevlist[i].lat, refevlist[i].lon, clat, clon)
    if epid[0] / 1000.0 > radius:
        continue
# Check epicenter (polygon search):
if searchmethod == 'pol':
    if not point_in_poly(refevlist[i].lon, refevlist[i].lat, geopolylist):
        continue
# All tests passed:
list.append(refevlist[i])

# Loop over extracted events:
# Open output file: