def getServiceStations(client, event, network="*", station="*", channel="*",
                       radius=None, lat=None, lon=None):
    '''Get all the available stations for the event.'''
    # if all networks or stations required, get a list
    logging.info("Searching for all networks/stations in network " + network +
                 " for " + str(event))
    if lat is None or lon is None or radius is None:
        inv = client.get_stations(network=network, station=station, channel=channel,
                                  location='*', level='station',
                                  starttime=event - 1, endtime=event)
    else:
        inv = client.get_stations(latitude=lat, longitude=lon,
                                  maxradius=kilometer2degrees(radius),
                                  level='station', starttime=event - 1, endtime=event)
    codeList = []
    for net in inv:
        netcode = net.code
        for sta in net:
            stacode = sta.code
            code = '.'.join((netcode, stacode))
            if code not in codeList:
                codeList.append(code)
    codeList.sort()
    logging.info("A total of " + str(len(codeList)) + " potential stations for " +
                 str(event) + " found.")
    return codeList
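# --- Usage sketch (not part of the original module): query the stations available for one
# event. Assumes an FDSN client and an event origin time; the data centre, network/channel
# strings and the 500 km search radius are illustrative values only.
from obspy import UTCDateTime
from obspy.clients.fdsn import Client

fdsn_client = Client("IRIS")
origin_time = UTCDateTime("2020-01-01T00:00:00")
# whole-network search
codes = getServiceStations(fdsn_client, origin_time, network="IU", channel="BH?")
# radius search (radius given in km, converted to degrees inside the function)
codes_near = getServiceStations(fdsn_client, origin_time, radius=500., lat=35.0, lon=-118.0)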
def write_event_results(st, net, stack, eve, not_used, comp, inv, paramdic,
                        lat=None, lon=None):
    st2 = st.select(component=comp)
    # we will make a csv file with the info for each channel for the event
    if not os.path.exists(net + '_results'):
        os.mkdir(net + '_results')
    filehand = net + '_results/Results_' + net + '_' + \
        comp + '_' + paramdic['phase'] + \
        '_' + str(eve['origins'][0]['time'].year) + \
        str(eve['origins'][0]['time'].julday) + '_' + \
        str(eve['origins'][0]['time'].hour).zfill(2) + \
        str(eve['origins'][0]['time'].minute).zfill(2)
    if lat is not None:
        filehand += '_' + str(abs(lat)) + '_' + str(abs(lon))
    filehand += '.csv'
    f = open(filehand, 'w')
    f.write('ID, dis, azimuth, depth, mag, amp, shift, corr, used, ptp, snr \n')
    for idx, tr in enumerate(st2):
        f.write(tr.id + ', ')
        coors = inv.get_coordinates(tr.id[:-1] + 'Z')
        (dis, azi, bazi) = gps2dist_azimuth(coors['latitude'], coors['longitude'],
                                            eve.origins[0].latitude,
                                            eve.origins[0].longitude)
        disdeg = kilometer2degrees(dis / 1000.)
        f.write(str(disdeg) + ', ')
        f.write(str(azi) + ', ')
        f.write(str(float(eve['origins'][0]['depth']) / 1000) + ', ')
        f.write(str(eve.magnitudes[0].mag) + ', ')
        # RMS amplitude of the trace relative to the stack
        amp = np.sqrt(np.sum(tr.data**2) / np.sum(stack**2))
        f.write(str(amp) + ', ')
        # time shift (s) and correlation of the trace against the stack
        cc = correlate(tr.data, stack, 20)
        shift, value = xcorr_max(cc)
        f.write(str(shift / float(tr.stats.sampling_rate)) + ', ')
        f.write(str(round(value, 5)) + ', ')
        if idx in not_used:
            f.write('Bad, ')
        else:
            f.write('Good, ')
        # peak-to-peak of the full window and its ratio to the first 5 s of the window
        tr2 = tr.copy()
        tr2.trim(tr2.stats.starttime, tr2.stats.starttime + 5.)
        f.write(str(np.ptp(tr.data)) + ', ')
        f.write(str(np.ptp(tr.data) / np.ptp(tr2.data)) + '\n')
    f.close()
    return
def get_event_params(eq_lat, eq_lon):
    # distance and azimuths from the event to the reference point (0, 0) on a sphere
    dist_az = gps2dist_azimuth(eq_lat, eq_lon, 0, 0, a=6371000.0, f=0.0)
    dist_km = dist_az[0] / 1000.0
    dist_deg = kilometer2degrees(dist_km)
    az = dist_az[1]
    baz = dist_az[2]
    # rotation angle derived from the back-azimuth (see the commented alternative using az)
    rotation_angle = -1.0 * ((baz - 180) - 90.0)
    #rotation_angle = -1.0*(az-90.0)
    return dist_deg, rotation_angle
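# --- Worked example (not from the original source): an event 30 degrees due north of the
# reference point (0, 0) on a sphere. gps2dist_azimuth with f=0 gives back-azimuth baz = 0
# (the origin looks due north toward the event), so rotation = -((0 - 180) - 90) = 270.
dist_deg, rotation_angle = get_event_params(30.0, 0.0)
# dist_deg       -> ~30.0
# rotation_angle -> 270.0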
def get_approximate_circumference(type_hull, location, center):
    wikidata_code = get_wikidata_code(location)
    Area = get_city_area(wikidata_code)
    # equivalent-area radius, converted from km to degrees
    raggio = kilometer2degrees(np.sqrt(Area / np.pi))
    hull_poly_arr = circumference_point(raggio, center)
    polygon_data = {
        'type_polygon': type_hull,
        'appoximate_circumference': hull_poly_arr,
        'center': center,
        'location_info': location
    }
    return polygon_data
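# --- Usage sketch (not from the original source): approximate a city footprint by a circle
# whose area matches the area found on Wikidata. 'Bologna' and its centre coordinates are
# placeholder inputs; get_wikidata_code, get_city_area and circumference_point are helpers
# defined elsewhere in this project.
poly = get_approximate_circumference('circle', 'Bologna', center=(11.34, 44.49))
# poly['appoximate_circumference'] holds the points sampled along the circle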
def calc_spatial_diff(x):
    temp_df = pd.DataFrame(data=None, columns=['station', 'gcarc', 'az', 'ep_dist'])
    for _i, station in enumerate(stn_list):
        stn_lat = self.inv.select(station=station)[0][0].latitude
        stn_lon = self.inv.select(station=station)[0][0].longitude
        # first GCARC dist & az
        gcarc, az, baz = gps2dist_azimuth(stn_lat, stn_lon, x['lat'], x['lon'])
        gcarc = gcarc / 1000  # turn it into km's
        # epicentral_dist
        ep_dist = kilometer2degrees(gcarc)
        temp_df.loc[_i] = [station, gcarc, az, ep_dist]
    self.spatial_dict[x['event_id']] = temp_df
def get_data(inv, eve, paramdic, model, client, debug=False):
    st = Stream()
    bad_stas = []
    for net in inv:
        for sta in net:
            for chan in sta:
                sncl = net.code + '.' + sta.code + '.' + chan.location_code + '.' + chan.code
                coors = inv.get_coordinates(sncl)
                (dis, azi, bazi) = gps2dist_azimuth(coors['latitude'], coors['longitude'],
                                                    eve.origins[0].latitude,
                                                    eve.origins[0].longitude)
                disdeg = kilometer2degrees(dis / 1000.)
                arrivals = model.get_travel_times(
                    source_depth_in_km=eve.origins[0].depth / 1000.,
                    distance_in_degree=disdeg, phase_list=paramdic['phase'])
                if len(arrivals) == 0:
                    break
                # window from 40 s before the predicted arrival to winlength + 10 s after it
                pstime = (eve.origins[0].time + arrivals[0].time) - 40.
                petime = pstime + paramdic['winlength'] + 50.
                try:
                    st += client.get_waveforms(net.code, sta.code, chan.location_code,
                                               chan.code, pstime, petime,
                                               attach_response=False)
                    if debug:
                        print('Got data for: ' + sncl)
                except:
                    #print('No data for: ' + sncl)
                    bad_stas.append(sncl)
    #st = choptocommon(st)
    return st, bad_stas
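# --- Usage sketch (not part of the original module): fetch windows around the predicted
# phase arrival for every channel in an inventory. The paramdic keys shown ('phase',
# 'winlength') are the ones read inside get_data; the data centre, station, event query
# and model name are illustrative assumptions.
from obspy import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.taup import TauPyModel

fdsn_client = Client("IRIS")
taup_model = TauPyModel(model="iasp91")
t0 = UTCDateTime("2020-01-01")
cat = fdsn_client.get_events(starttime=t0, endtime=t0 + 31 * 86400, minmagnitude=7.0)
inv = fdsn_client.get_stations(network="IU", station="ANMO", channel="BH*",
                               level="channel", starttime=t0, endtime=t0 + 31 * 86400)
paramdic = {'phase': ['P'], 'winlength': 300.}
st, bad_stas = get_data(inv, cat[0], paramdic, taup_model, fdsn_client)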
def plot_map(elon, wlon, nlat, slat, res='c', dpi=300, xpixels=800, add_holocene_volcanoes=False, add_seismic_stations=False, add_ralco=False, add_labels=False, add_faults=False, add_events_pygemadb=False, evstarttime=UTCDateTime(1970, 1, 1), evendtime=UTCDateTime(), dark_background=False, show_plot=True, savedir=None): if dark_background: plt.style.use(['dark_background']) else: plt.style.use(['default']) # add basemap matplotlib fig = plt.figure() ax = plt.axes([0.1, 0.3, 0.9, 0.6]) map = Basemap(llcrnrlon=wlon, urcrnrlon=elon, llcrnrlat=slat, urcrnrlat=nlat, projection='mill', resolution=res, area_thresh=1000000, epsg=4269) if add_events_pygemadb: map.drawparallels(np.arange(slat, nlat, abs(nlat - slat) / 6.), dashes=[1, 2], labels=[1, 0, 0, 0], linewidth=0.004, fontsize=6, zorder=100, labelstyle='+/-') map.drawmeridians(np.arange(wlon, elon, abs(wlon - elon) / 6.), dashes=[1, 2], labels=[0, 0, 1, 0], linewidth=0.004, fontsize=6, zorder=100, labelstyle='+/-') else: map.drawparallels(np.arange(slat, nlat, abs(nlat - slat) / 6.), dashes=[1, 2], labels=[1, 1, 0, 0], linewidth=0.004, fontsize=6, zorder=100, labelstyle='+/-') map.drawmeridians(np.arange(wlon, elon, abs(wlon - elon) / 6.), dashes=[1, 2], labels=[0, 0, 1, 1], linewidth=0.004, fontsize=6, zorder=100, labelstyle='+/-') map.drawcoastlines(linewidth=0.8, zorder=5) map.drawcountries(linewidth=0.5, linestyle='-', zorder=2) map.arcgisimage(service='World_Shaded_Relief', xpixels=xpixels, dpi=dpi, verbose=False, zorder=1) img = map.arcgisimage(service='ESRI_Imagery_World_2D', xpixels=xpixels, dpi=dpi, verbose=False, zorder=0) img.set_alpha(0.5) if add_events_pygemadb: map.fillcontinents(color='0.1', lake_color='steelblue', alpha=0.4, zorder=1000 - 1) alpha = 1.0 color_stations = 'c' color_volcanoes = 'r' color_faults = 'k' color_places = 'g' # add volcanoes if add_holocene_volcanoes: vnames, xvolcs, yvolcs = load_volcanoes() xvolcs, yvolcs = map(xvolcs, yvolcs) ax.plot(xvolcs, yvolcs, marker='^', color='None', markeredgecolor=color_volcanoes, markeredgewidth=1.5, lw=0., ms=4.5, zorder=10, alpha=alpha, clip_on=True) if add_labels: for x, y, vname in zip(xvolcs, yvolcs, vnames): if vname == 'Tolhuaca': ax.annotate('Vn. ' + vname + '\n', (x, y), color='k', weight='bold', fontsize=4.5, ha='center', va='bottom', clip_on=True, zorder=2000 + 3, fontstyle='normal') elif vname == 'Copahue': ax.annotate(' Vn. ' + vname, (x, y), color='k', weight='bold', fontsize=4.5, ha='left', va='top', clip_on=True, zorder=2000 + 3, fontstyle='normal') elif vname == 'Callaqui': ax.annotate(' Vn. ' + vname, (x, y), color='k', weight='bold', fontsize=4.5, ha='left', va='bottom', clip_on=True, zorder=2000 + 3, fontstyle='normal') elif vname == 'Trolon': ax.annotate('Vn. ' + vname + ' ', (x, y), color='k', weight='bold', fontsize=4.5, ha='right', va='bottom', clip_on=True, zorder=2000 + 3, fontstyle='normal') elif vname == 'Lonquimay': ax.annotate(' Vn. 
' + vname, (x, y), color='k', weight='bold', fontsize=4.5, ha='left', va='bottom', clip_on=True, zorder=2000 + 3, fontstyle='normal') # add stations if add_seismic_stations: networks, stations, stlons, stlats, stalts = load_station_metadata() for net, stat, lon, lat in zip(networks, stations, stlons, stlats): x, y = map(float(lon), float(lat)) ax.scatter(x, y, marker='s', color='None', s=20, zorder=2000 + 2, alpha=alpha, clip_on=True, lw=1.4, edgecolors=color_stations) if add_labels: ax.annotate("\n\n " + stat, (x, y), weight='bold', fontsize=5, ha='left', va='center', clip_on=True, zorder=2000 + 3) # add ralco if add_ralco: si, sj = map(-71.611683, -37.910428) map.scatter(si, sj, c=color_places, linewidths=0.3, edgecolors='k', alpha=alpha, zorder=15, marker='X', s=30, clip_on=True) if add_labels: ax.annotate('Pangue ', xy=(si, sj), ha='right', va='top', color='k', zorder=15, fontsize=4.5, fontweight='bold', xytext=(-0, -0), textcoords='offset points', fontstyle='normal', clip_on=True) si, sj = map(-71.475571, -38.046040) map.scatter(si, sj, c=color_places, linewidths=0.3, edgecolors='k', alpha=alpha, zorder=15, marker='X', s=30, clip_on=True) if add_labels: ax.annotate('Ralco ', xy=(si, sj), ha='right', va='top', color='k', zorder=15, fontsize=4.5, fontweight='bold', xytext=(-0, -0), textcoords='offset points', fontstyle='normal', clip_on=True) # add faults if add_faults: PYGEMA_PATH = "%s/pygema" % (site.getsitepackages()[0]) shp = map.readshapefile( PYGEMA_PATH + '/src/shapes/gerd/fallas_sielfeld_etal_2019_mod', 'fallas', drawbounds=False) types = np.unique( np.array([ info['Name'] for info, shape in zip(map.fallas_info, map.fallas) ])) #print("types of faults =", types) for info, shape in zip(map.fallas_info, map.fallas): if info['Name'] == 'LOFS': x, y = zip(*shape) map.plot(x, y, marker=None, color=color_faults, alpha=alpha, linestyle='-', linewidth=0.5, zorder=9, clip_on=True) elif info['Name'] == 'ATF': x, y = zip(*shape) map.plot(x, y, marker=None, color=color_faults, alpha=alpha, linestyle='--', linewidth=0.5, zorder=9, clip_on=True) # add seismicity if add_events_pygemadb: events_list = select_events_manual_loc(evstarttime, evendtime, table="LOC") zmin = 0 zmax = 30 cmap = plt.cm.jet_r #plt.cm.jet_r #plt.cm.gnuplot_r nlevels = 10 bounds = np.linspace(zmin, zmax, nlevels, endpoint=True) levels = np.linspace(zmin, zmax, nlevels, endpoint=True) norm = mpl.colors.BoundaryNorm(bounds, cmap.N) x = [] y = [] z = [] s = [] xerr = [] yerr = [] zerr = [] for event in events_list: evlon = event[1] evlat = event[2] evdep = event[3] evmag = event[4] evdx = event[5] evdy = event[6] evdz = event[7] xi, yi = map(evlon, evlat) evdx_deg = kilometer2degrees(evdx) evdy_deg = kilometer2degrees(evdy) xi_new, yi_new = map(evlon + evdx_deg, evlat + evdy_deg) xi_err = abs(xi - xi_new) yi_err = abs(yi - yi_new) x.append(xi) y.append(yi) z.append(evdep) s.append(evmag**3 * 1.5) xerr.append(xi_err) yerr.append(yi_err) zerr.append(evdz) ax.scatter(x, y, c=z, cmap=cmap, s=s, marker='o', norm=norm, zorder=1000, alpha=1, clip_on=True, lw=0.3, edgecolors='k') #ax.errorbar(x, y, xerr=xerr, yerr=yerr, elinewidth=0.4, ecolor='0.4', capsize=1.7, capthick=0.4, linewidth=0, zorder=19) # add legend s1 = plt.scatter([], [], c='None', linewidths=1.4, edgecolors=color_stations, alpha=alpha, zorder=1000, marker='s', s=15) s2, = plt.plot([], [], c='None', markeredgecolor=color_volcanoes, markeredgewidth=1.4, lw=0., alpha=alpha, zorder=1000, marker='^', ms=4) f1, = plt.plot([-100, -99], [-100, -98], c=color_faults, 
linewidth=0.8, alpha=alpha, zorder=1000, ls='-') # darkgoldenrod f2, = plt.plot([-100, -99], [-100, -98], c=color_faults, linewidth=0.8, alpha=alpha, zorder=1000, ls='--') # darkgoldenrod leg = plt.legend([s1, s2, f1, f2], [ 'Estación Sísmica', 'Volcán', 'Sistema de Fallas\nLiquiñe-Ofqui', 'Falla Biobio' ], fontsize=4, ncol=1, frameon=True, fancybox=True, shadow=False, framealpha=0.6, loc=4) leg.set_zorder(1000) if add_events_pygemadb: ax2 = plt.axes([0.362, 0.1, 0.375, 0.17]) ax2.minorticks_on() ax2.tick_params(axis='both', which='major', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in') ax2.tick_params(axis='both', which='minor', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in') ax2.tick_params(axis='x', which='major', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in', rotation=0) ax2.tick_params(axis='x', which='minor', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in', rotation=0) ax2.spines['right'].set_visible(True) ax2.spines['top'].set_visible(True) ax2.spines['left'].set_visible(True) ax2.spines['bottom'].set_visible(True) ax2.set_xlim(wlon, elon) ax2.set_ylim(zmax, zmin) labels = [ r"%.1f$^{\circ}$" % (item) for item in ax2.get_xticks().tolist() ] ax2.set_xticklabels(labels) ax2.set_ylabel("Prof. (km)", fontsize=6) x = [] y = [] z = [] s = [] xerr = [] yerr = [] zerr = [] for event in events_list: evlon = event[1] evlat = event[2] evdep = event[3] evmag = event[4] evdx = event[5] evdy = event[6] evdz = event[7] evdx_deg = kilometer2degrees(evdx) evdy_deg = kilometer2degrees(evdy) xi_new, yi_new = evlon + evdx_deg, evlat + evdy_deg xi_err = abs(evlon - xi_new) yi_err = abs(evlat - yi_new) x.append(evlon) y.append(evlat) z.append(evdep) s.append(evmag**3 * 1.5) xerr.append(xi_err) yerr.append(yi_err) zerr.append(evdz) ax2.scatter(x, z, c=z, cmap=cmap, s=s, marker='o', norm=norm, zorder=20, alpha=0.8, clip_on=True, lw=0.3, edgecolors='k') #ax2.errorbar(x, z, xerr=xerr, yerr=zerr, elinewidth=0.4, ecolor='0.4', capsize=1.7, capthick=0.4, linewidth=0, zorder=19) ax3 = plt.axes([0.76, 0.3, 0.15, 0.6]) ax3.minorticks_on() ax3.yaxis.tick_right() ax3.xaxis.tick_top() ax3.tick_params(axis='both', which='major', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in') ax3.tick_params(axis='both', which='minor', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in') ax3.tick_params(axis='x', which='major', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in', rotation=0) ax3.tick_params(axis='x', which='minor', labelsize=6, bottom='on', top='on', left='on', right='on', direction='in', rotation=0) ax3.spines['right'].set_visible(True) ax3.spines['top'].set_visible(True) ax3.spines['left'].set_visible(True) ax3.spines['bottom'].set_visible(True) ax3.set_xlim(zmin, zmax) ax3.set_ylim(slat, nlat) labels = [ r"%.1f$^{\circ}$" % (item) for item in ax3.get_yticks().tolist() ] ax3.set_yticklabels(labels) ax3.set_xlabel("Profundidad (km)", fontsize=6) ax3.xaxis.set_label_position('top') x = [] y = [] z = [] s = [] xerr = [] yerr = [] zerr = [] for event in events_list: evlon = event[1] evlat = event[2] evdep = event[3] evmag = event[4] evdx = event[5] evdy = event[6] evdz = event[7] evdx_deg = kilometer2degrees(evdx) evdy_deg = kilometer2degrees(evdy) xi_new, yi_new = evlon + evdx_deg, evlat + evdy_deg xi_err = abs(evlon - xi_new) yi_err = abs(evlat - yi_new) x.append(evlon) y.append(evlat) z.append(evdep) s.append(evmag**3 * 1.5) 
xerr.append(xi_err) yerr.append(yi_err) zerr.append(evdz) ax3.scatter(z, y, c=z, cmap=cmap, s=s, marker='o', norm=norm, zorder=20, alpha=0.8, clip_on=True, lw=0.3, edgecolors='k') #ax3.errorbar(z, y, xerr=zerr, yerr=yerr, elinewidth=0.4, ecolor='0.4', capsize=1.7, capthick=0.4, linewidth=0, zorder=19) ml2 = plt.scatter([], [], marker='o', color='w', s=1.5**3 * 1.5, zorder=2000 + 2, alpha=1, clip_on=True, lw=1, edgecolors='k') ml3 = plt.scatter([], [], marker='o', color='w', s=2.5**3 * 1.5, zorder=2000 + 2, alpha=1, clip_on=True, lw=1, edgecolors='k') ml4 = plt.scatter([], [], marker='o', color='w', s=3.5**3 * 1.5, zorder=2000 + 2, alpha=1, clip_on=True, lw=1, edgecolors='k') ml5 = plt.scatter([], [], marker='o', color='w', s=4.5**3 * 1.5, zorder=2000 + 2, alpha=1, clip_on=True, lw=1, edgecolors='k') leg = plt.legend([ml5, ml4, ml3, ml2, s1, s2, f1, f2], ['4.5', '3.5', '2.5', '1.5'], title=r"M$_l$", title_fontsize=6, labelspacing=1.2, fontsize=6, ncol=1, frameon=True, fancybox=True, shadow=False, framealpha=0.2, bbox_to_anchor=(0.55, -0.03)) leg.set_zorder(1000) frame = leg.get_frame() frame.set_facecolor('w') sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm) sm._A = [] cax = fig.add_axes([0.87, 0.108, 0.017, 0.14]) cb1 = plt.colorbar(sm, format='%i', extend='neither', norm=norm, spacing='proportional', orientation='vertical', cax=cax, ticks=np.linspace(zmin, zmax, 5)) cb1.ax.invert_yaxis() cb1.set_label('Profundidad (km)', size=6) for j in cb1.ax.get_yticklabels(): j.set_fontsize(6) if dark_background: ax.patch.set_facecolor('k') else: ax.patch.set_facecolor('w') if savedir: outdir = "%s" % (savedir) if not os.path.isdir(outdir): os.makedirs(outdir) figname = "%s/map.jpg" % (outdir) plt.savefig(figname, dpi=300, bbox_inches='tight', transparent=False) if not show_plot: plt.close('all') if show_plot: plt.show() plt.close('all')
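# --- Usage sketch (not part of the original module): draw a regional map around the Biobío
# area with volcanoes, stations and labels. The coordinates and output directory are
# illustrative; the PyGEMA database options are left off so no database access is needed.
plot_map(elon=-70.8, wlon=-71.9, nlat=-37.6, slat=-38.5,
         add_holocene_volcanoes=True, add_seismic_stations=True,
         add_labels=True, show_plot=False, savedir='figures')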
def make_earthquake_list(param_dict, **kwargs):
    '''
    Generate earthquakes to be used in tomographic inversion. Earthquake locations can be
    generated randomly within the distance range (deltamin, deltamax), or alternatively a
    ring of earthquakes at a fixed distance from the point (0,0) can be generated. In the
    future, events may be given as an obspy event catalog.

    args--------------------------------------------------------------------------
    param_dict: parameter dictionary (read from file 'inparam_tomo') containing
        nevents:  number of earthquakes (only used if geometry is 'random')
        deltamin: minimum earthquake distance from (0,0) (only if geometry is 'random')
        deltamax: maximum earthquake distance from (0,0) (only if geometry is 'random')
        ringdist: distance of ring from (0,0). Can be a tuple for multiple rings.
                  (only if geometry is 'ring')
        dtheta:   spacing between earthquakes in ring, given in degrees. Default = 30.

    kwargs------------------------------------------------------------------------
    lat0, lon0: reference point for the 'random' geometry (default (0.0, 0.0))
    '''
    geometry = param_dict['event_geometry']
    nevents = param_dict['nevents']
    depth = param_dict['depth']
    deltamin = param_dict['deltamin']
    deltamax = param_dict['deltamax']
    ringdist = param_dict['ringdist']
    dtheta = param_dict['dtheta']

    lat0 = kwargs.get('lat0', 0.0)
    lon0 = kwargs.get('lon0', 0.0)

    eq_list = []
    n = 1

    if geometry == 'random':
        while len(eq_list) < nevents:
            lon = (2.0 * deltamax * np.random.random(1)) - deltamax
            lat = (2.0 * deltamax * np.random.random(1)) - deltamax
            dist_az = gps2dist_azimuth(lat, lon, lat0, lon0, a=6371000.0, f=0.0)
            dist_km = dist_az[0] / 1000.0
            dist_deg = kilometer2degrees(dist_km)
            if dist_deg >= deltamin and dist_deg <= deltamax:
                eq_list.append((n, lon[0], lat[0], depth))
                n += 1

    elif geometry == 'ring':
        theta = np.arange(0, 360, dtheta)
        origin = geopy.Point(0, 0)
        eq_list = []
        if isinstance(ringdist, (int, float)):
            d_km = ringdist * ((6371.0 * 2 * np.pi) / 360.0)
            for i in range(0, len(theta)):
                bearing = theta[i]
                destination = VincentyDistance(kilometers=d_km).destination(origin, bearing)
                lat = destination[0]
                lon = destination[1]
                eq_list.append((n, lon, lat, depth))
                n += 1
        elif isinstance(ringdist, tuple):
            for r in ringdist:
                d_km = r * ((6371.0 * 2 * np.pi) / 360.0)
                for i in range(0, len(theta)):
                    bearing = theta[i]
                    destination = VincentyDistance(kilometers=d_km).destination(origin, bearing)
                    lat = destination[0]
                    lon = destination[1]
                    eq_list.append((n, lon, lat, depth))
                    n += 1

    np.savetxt('earthquake_list', eq_list, fmt=['%d', '%5.5f', '%5.5f', '%5.5f'])
    return eq_list
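# --- Usage sketch (not from the original source): the parameter dictionary is normally read
# from 'inparam_tomo'; the literal values below are illustrative only.
param_dict = {'event_geometry': 'ring',   # 'random' or 'ring'
              'nevents': 12,              # only used for 'random'
              'depth': 100.0,             # source depth (km)
              'deltamin': 30.0,           # min distance from (0, 0), deg ('random' only)
              'deltamax': 90.0,           # max distance from (0, 0), deg ('random' only)
              'ringdist': (60.0, 80.0),   # ring radii in degrees ('ring' only)
              'dtheta': 30.0}             # spacing along each ring (deg)
eq_list = make_earthquake_list(param_dict)  # also writes the 'earthquake_list' file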
def write_input(eq_lat,eq_lon,eq_dep,ievt,stations,phase,delays_file,Tmin,taup_model,filename,raytheory=False,tt_from_raydata=True,**kwargs): ''' write an input file for globalseis finite frequency tomography software. each earthquake and datatype (P,S,etc...) has it's own input file args-------------------------------------------------------------------------- eq_lat: earthquake latitude (deg) eq_lon: earthquake longitude (deg) eq_dep: earthquake depth (km) stations: stations array (lons,lats) delays_file: h5py datafile containing cross correlation delay times Tmin: minimum period at which cross correlation measurements were made taup_model: name of TauPyModel used to calculate 1D travel times filename: raytheory: True or False tt_from_raydata: If True, writes cross correlation times to 'xcor*', which will then be added to 1D travel times from raydata kwargs------------------------------------------------------------------------ plot_figure: plot a figure showing source receiver geometry and delay map t_sig: estimated standard error in cross correlation measurement. add_noise: add gaussian noise to traveltime measurements of magnitude t_sig fake_SKS_header: test the SKS header ''' #define variables used in finite frequency tomography (kwargs)---------------- idate = kwargs.get('idate','15001') #event date YYDDD where DDD is between 1 and 365 iotime = kwargs.get('iotime','010101') #vent origin time (HHMMSS) kluster = kwargs.get('kluster','0') #0 if no clustering used stationcode = kwargs.get('stationcode','XXXX') #station code (no more than 16 chars) netw = kwargs.get('netw','PLUMENET ') #network code nobst = kwargs.get('nobst','1') #number of travel time measurements nobsa = kwargs.get('nobsa','0') #number of amplitude measurements kpole = kwargs.get('kpole','0') #number of polar crossings (0 for P and S) sampling_rate = kwargs.get('sampling_rate',10.0) n_bands = kwargs.get('n_bands',1) # spectral bands used (TODO setup more than one) kunit = kwargs.get('kunit',1) #unit of noise (1 = nm) rms0 = kwargs.get('rms0',0) #don't know what this is plot_figure = kwargs.get('plot_figure',False) dist_min = kwargs.get('dist_min',30.0) dist_max = kwargs.get('dist_max',90.0) t_sig = kwargs.get('t_sig',0.0) add_noise = kwargs.get('add_noise',False) fake_SKS_header = kwargs.get('fake_SKS_header',False) filter_type = kwargs.get('filter_type','none') ievt=int(ievt) #double check ievt is an integer (in case it was read from a file) debug = False #create taup model------------------------------------------------------------ tt_model = TauPyModel(taup_model) #get filter parameters-------------------------------------------------------- print 'Tmin = ', Tmin filter_type, freqmin,freqmax, window = get_filter_params(delays_file,phase,Tmin,filter_type=filter_type) omega,amp = get_filter_freqs(filter_type,freqmin,freqmax,sampling_rate) window_len = window[1] - window[0] #write header----------------------------------------------------------------- f = open(filename,'w') f.write('{}'.format(filename)+'\n') f.write('{}'.format('None'+'\n')) fdelays = open('xcor_{}'.format(filename),'w') #ray information-------------------------------------------------------------- if phase == 'P': gm_component = 'BHZ ' #ground motion component f.write('P'+'\n') f.write('P'+'\n') f.write('6371 1 1'+'\n') f.write('3482 2 1'+'\n') f.write('6371 5 0'+'\n') elif phase == 'S' and fake_SKS_header == False: gm_component = 'BHT ' #ground motion component f.write('S'+'\n') f.write('S'+'\n') f.write('6371 1 2'+'\n') f.write('3482 2 2'+'\n') 
f.write('6371 5 0'+'\n') elif phase == 'SKS' or fake_SKS_header == True: gm_component = 'BHR ' #ground motion component f.write('SKS'+'\n') f.write('SKS'+'\n') f.write('6371 1 2'+'\n') f.write('3482 4 1'+'\n') f.write('1217.1 2 1'+'\n') f.write('3482 4 2'+'\n') f.write('6371 5 0'+'\n') #this is hardwired for now (based on range of rays found with ray tracing software) #TODO make distance range more adaptable if phase == 'P': dist_min = 30.0 #dist_max = 98.3859100 dist_max = 97.0 elif phase == 'S': dist_min = 30.0 #dist_max = 99.0557175 dist_max = 97.0 elif phase == 'SKS': #dist_min = 66.0320663 #dist_max = 144.349365 dist_min = 68.0 dist_max = 142.0 #write spectral band information----------------------------------------------- if raytheory: n_bands=0 f.write('{}'.format(n_bands)+'\n') else: f.write('{}'.format(n_bands)+'\n') f.write('{}'.format(len(omega))+'\n') for i in range(0,len(omega)): f.write('{} {}'.format(omega[i],amp[i])+'\n') #event delay map-------------------------------------------------------------- #lats_i = np.arange(-30.0,30.0,0.1) #lons_i = np.arange(-30.0,30.0,0.1) lats_i = np.arange(-45.0,45.0,0.1) lons_i = np.arange(-45.0,45.0,0.1) if plot_figure: event_map,figure_axis = make_event_delay_map(eq_lat,eq_lon,phase,delays_file,Tmin,lats_i=lats_i,lons_i=lons_i,plot=True,return_axis=False,nevent=ievt) else: if debug: print 'func:write_input- making event delay map for', phase #print 'eq_lat,eq_lon,phase,Tmin lats_i,lons_i',eq_lat,eq_lon,phase,Tmin,lats_i,lons_i event_map = make_event_delay_map(eq_lat,eq_lon,phase,delays_file,Tmin,lats_i=lats_i, lons_i=lons_i,return_axis=False,plot=True,nevent=ievt) #find delays at stations------------------------------------------------------ if plot_figure: station_delays = get_station_delays(event_map,stations,lats_i,lons_i,pass_figure_axis=True,figure_axis=figure_axis) else: station_delays = get_station_delays(event_map,stations,lats_i,lons_i) #add noise (optional)--------------------------------------------------------- if t_sig != 0: noise = np.random.normal(0,t_sig,len(station_delays)) if add_noise: station_delays += noise station_lons = stations[0,:] station_lats = stations[1,:] n_stations = len(station_lats) station_elevation = 0.0 for i in range(0,n_stations): dist_deg, rotation_angle = get_event_params(eq_lat,eq_lon) #find event distance event_distaz = gps2dist_azimuth(eq_lat,eq_lon,station_lats[i],station_lons[i],a=6371000.0,f=0.0) event_dist_deg = kilometer2degrees((event_distaz[0]/1000.0)) #skip station if too close or too far from source if event_dist_deg <= dist_min or event_dist_deg >= dist_max: continue #get ray theoretical travel time #if phase == 'S': # phase_list = ['s','S','Sdiff'] #elif phase == 'P': # phase_list = ['p','P','Pdiff'] ray_theory_arr = tt_model.get_travel_times(eq_dep,event_dist_deg,phase_list=[phase]) ### TRY TO GET TRAVEL TIME IN CORE ######################################################### ray_theory_path = tt_model.get_ray_paths(eq_dep,event_dist_deg,phase_list=[phase]) phase_path = ray_theory_path[0] path_time = phase_path.path['time'] path_dt = np.diff(path_time) path_depth = phase_path.path['depth'] time_in_core = 0 for p_i in range(0,len(path_dt)): if path_depth[p_i] >= 2889.0: time_in_core += path_dt[p_i] ############################################################################################ if debug: print '_________________________________________________________________________________' print 'arrivals from taup_get_travel_time for event parameters [depth,delta(deg),phase]:' print 
'[{},{},{}]'.format(eq_dep,event_dist_deg,phase),ray_theory_arr print 'time in core: ', time_in_core print '_________________________________________________________________________________' ray_theory_travel_time = ray_theory_arr[0].time delay_time = station_delays[i] tobs = ray_theory_travel_time - delay_time if debug: print 'distance, phase, raytheory travel time, observed delay:', event_dist_deg,phase,ray_theory_travel_time,delay_time print 'the travel time observation is ', tobs fdelays.write('{}'.format(delay_time)+'\n') if raytheory: n_bands = 0 nbt = 0 #spectral band number (must be 0 if ray theory) window_len = 0 kunit = 0 corcoeft=0 else: nbt = 1 #spectral band number #write line 1-------------------------------------------------------------- f.write('{} {} {} {} {} {} {} {} {} {} {} {} {} {} {} {}'.format(idate, iotime,ievt,kluster,stationcode,netw,gm_component,eq_lat,eq_lon,eq_dep, station_lats[i],station_lons[i],station_elevation,nobst,nobsa,kpole)+'\n') #write line 2-------------------------------------------------------------- f.write('{} {} '.format(kunit,rms0)) for j in range(0,n_bands+1): f.write('0') #used to be 0.0 f.write('\n') #write line 3--------------------------------------------------------------- if raytheory: f.write('{}'.format(1)+'\n') else: f.write('{}'.format(n_bands)+'\n') #write line 4--------------------------------------------------------------- corcoeft = 1.0 # cross correlation coefficient f.write('{} {} {} {} {} {} {}'.format(tobs,t_sig,corcoeft,nbt,window_len,time_in_core,'#tobs,tsig,corcoeft,nbt,window,tincore')+'\n') #write line 5-------------------------------------------------------------- f.write('{}'.format(0)+'\n')
def get_domain(lat_source, lon_source, lat_max_in_, lat_min_in_, lon_max_in_, lon_min_in_,
               dimension, dchosen=50):
    lat_max_in = lat_max_in_
    lat_min_in = lat_min_in_
    if (abs(lat_min_in - lat_max_in) < 1e-3):
        lat_min_in -= 0.1
    lon_max_in = lon_max_in_
    lon_min_in = lon_min_in_
    if (abs(lon_min_in - lon_max_in) < 1e-3):
        lon_min_in -= 0.1

    factor = 0
    dshift = 15000.
    #dchosen = 80

    diff = abs(lat_max_in_ - lat_min_in_)
    if diff < 0.25:
        lat_max_in = lat_max_in_ + diff / 2.
        lat_min_in = lat_min_in_ - diff / 2.
    diff = abs(lon_max_in_ - lon_min_in_)
    if diff < 0.25:
        lon_max_in = lon_max_in_ + diff / 2.
        lon_min_in = lon_min_in_ - diff / 2.

    dlon, dlat = abs(lon_max_in - lon_min_in) / dchosen, abs(lat_max_in - lat_min_in) / dchosen
    lat_max, lat_min = degrees2kilometers(lat_max_in) * 1000., degrees2kilometers(lat_min_in) * 1000.
    lon_max, lon_min = degrees2kilometers(lon_max_in) * 1000., degrees2kilometers(lon_min_in) * 1000.
    dx, dy, dz = abs(lon_max - lon_min) / dchosen, abs(lat_max - lat_min) / dchosen, 200.
    xmin, xmax = lon_min - factor * dy - dshift, lon_max + factor * dy + dshift
    ymin, ymax = lat_min - factor * dx - dshift, lat_max + factor * dx + dshift
    zmax = 30000.

    ## Transform domain to make x and y powers of two
    xmin_, xmax_, dx_ = transform_domain_power2(xmin, xmax, dx)
    #ymin_, ymax_, dy_ = transform_domain_power2(ymin, ymax, dy)
    xmin, xmax, dx = xmin_, xmax_, dx_
    #ymin, ymax, dy = ymin_, ymax_, dy_
    #int(2**nextpow2((xmax-xmin)/dx))
    if dimension == 3:
        if abs(dy) < 1e-5:
            dy = (ymax - ymin) / 10  ## DEFAULT VALUE
        ymin_, ymax_, dy_ = transform_domain_power2(ymin, ymax, dy)
        ymin, ymax, dy = ymin_, ymax_, dy_
        yy = np.arange(ymin, ymax, dy)
        ymin = yy[0]
        ymax = yy[-1]
        loc_ = np.argmin(abs(yy))
        if abs(yy[loc_]) < 1e-5:
            ymax -= yy[loc_]
            ymin -= yy[loc_]

    ## OLD before Jul 13 2020
    dx, dy = abs(xmax - xmin) / dchosen, abs(ymax - ymin) / dchosen

    domain = {}
    domain.update({'origin': (lat_source, lon_source)})
    domain.update({
        'latmin': lat_source + kilometer2degrees(ymin / 1000.),
        'latmax': lat_source + kilometer2degrees(ymax / 1000.)
    })
    domain.update({
        'lonmin': lon_source + kilometer2degrees(xmin / 1000.),
        'lonmax': lon_source + kilometer2degrees(xmax / 1000.)
    })
    domain.update({'xmin': xmin, 'xmax': xmax})
    domain.update({'ymin': ymin, 'ymax': ymax})
    domain.update({'zmin': 0., 'zmax': zmax})
    domain.update({'dx': dx, 'dy': dy, 'dz': dz})
    return domain
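# --- Usage sketch (not from the original source): the lat/lon bounds are offsets in degrees
# relative to the source, and the returned dictionary holds both metric limits (xmin..dz)
# and geographic limits (latmin..lonmax). The numbers below are illustrative.
domain = get_domain(lat_source=-38.0, lon_source=-71.5,
                    lat_max_in_=0.5, lat_min_in_=-0.5,
                    lon_max_in_=0.5, lon_min_in_=-0.5,
                    dimension=3, dchosen=50)
print(domain['dx'], domain['latmin'], domain['latmax'])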
def getangles(net, sta, loc, stime, etime, debug=False, plot=False): # Input net, station, location, starttime, endtime # grab the station info, then the events # We loop through the events and do the calculations on each of the events for the station # we append the results to the mess file and finally print it out mess = [] # Get the latitude and longitude inv = client.get_stations(network=net, station=sta, channel='BH*', level="response", location=loc, starttime=stime, endtime=etime) stalat = inv[0][0][0].latitude stalon = inv[0][0][0].longitude staele = inv[0][0][0].elevation if debug: print('Station lat:' + str(stalat) + ' station lon:' + str(stalon) + ' station ele:' + str(staele)) # Get our list of events cat = client.get_events(starttime=stime, minmagnitude=6., latitude=stalat, longitude=stalon, maxradius=90., minradius=30., endtime=etime, mindepth=60) print(cat) for idx, eve in enumerate(cat): print('On event: ' + str(idx + 1) + ' of ' + str(len(cat))) (dis, bazi, azi) = gps2dist_azimuth(stalat, stalon, eve.origins[0].latitude, eve.origins[0].longitude) if debug: print('Here is the back-azimuth: ' + str(bazi) + ' here is the azimuth: ' + str(azi)) # dis is in m dis = kilometer2degrees(dis / 1000.) arrivals = model.get_travel_times( source_depth_in_km=eve.origins[0].depth / 1000., distance_in_degree=dis, phase_list=['P']) arrivals = [arrivals[0]] arrivalsS = model.get_travel_times( source_depth_in_km=eve.origins[0].depth / 1000., distance_in_degree=dis, phase_list=['S']) arrivals.append(arrivalsS[0]) print(arrivals) # If we have multiple phase skip to the next event if len(arrivals) < 2: continue if debug: print(arrivals) print(eve) # We need to also get a Noise window phasestime = (eve.origins[0].time + arrivals[0].time) - 10. phaseetime = (eve.origins[0].time + arrivals[0].time) - 5. print(phasestime) print(phaseetime) try: #if True: st = client.get_waveforms(net, sta, loc, 'BH*', phasestime, phaseetime) except: print('Unable to get data') continue st.detrend('constant') st.remove_sensitivity(inventory=inv) if debug: print('Here is the stime ' + str(phasestime) + ' here is the end time ' + str(phaseetime)) # from here we can estimate the SNR noise = sum(st.std()) if debug: print('Here is the noise:' + str(noise)) # So now we have two events one P and one S for arrival in arrivals: phasestime = (eve.origins[0].time + arrival.time) - 5. phaseetime = (eve.origins[0].time + arrival.time) + 15. if debug: print(phasestime) print(phaseetime) try: st = client.get_waveforms(net, sta, loc, 'BH*', phasestime, phaseetime) except: continue print('Was not able to get data') st.detrend('constant') st.detrend('linear') #st.merge(fill_value=0) st.remove_sensitivity(inventory=inv) st.taper(0.05) st.rotate(method="->ZNE", inventory=inv) st.rotate(method="NE->RT", back_azimuth=bazi) print('Here we are') for tr in st: tr.data *= 10.**6 if plot: fig = plt.figure(1, figsize=(12, 12)) t = np.arange(st[0].stats.npts) / st[0].stats.sampling_rate for idx in range(len(st)): plt.subplot(3, 1, 1 + idx) plt.plot(t, st[idx].data, label=st[idx].id, color='k') plt.axvspan(5., 10., alpha=0.5) plt.xlim(min(t), max(t)) plt.ylim(-max(np.abs(st.max())) * 1.1, max(np.abs(st.max())) * 1.1) plt.legend() if idx == 1: plt.ylabel('Velocity ($\mu m/s$)') plt.xlabel('Time (s)') plt.show() plt.clf() # Add figure save and label #st.trim(st[0].stats.starttime+5., st[0].stats.starttime+10.) 
signal = sum(st.std()) if debug: print('Here is the signal:' + str(signal)) print(st) print('Here is the SNR:' + str(signal / noise)) st.sort(reverse=True) azies, inces, aziesE, incesE = particle_motion_odr(st) if debug: print('Here is the azies: ' + str(azies) + ' +/-' + str(aziesE)) print('Here is the inces: ' + str(inces) + ' +/-' + str(inces)) #st.rotate(method="NE->RT", back_azimuth=bazi) angle, scale = decomppca(st, arrival.name) if debug: print('Here is the angle: ' + str(angle) + ' here is the scale: ' + str(scale)) if plot: fig = plt.figure(2, figsize=(12, 12)) ax = plt.gca(projection='3d') ax.plot3D(st[0].data, st[1].data, st[2].data, color='gray', alpha=.5) plt.show() #plt.clf() # Add figure save and label if debug: print('Here is PCA: ' + str(angle) + ' Here is odr:' + str(azies)) if debug: print('Here is the azies:' + str(azies) + ' here is azimuth:' + str(azi)) print('Here is the inces:' + str(inces) + ' here is incident:' + str(arrival.incident_angle)) print(arrival.name) mess.append({ 'SNR': signal / noise, 'azi': azi, 'Phase': arrival.name, 'Incident': arrival.incident_angle, 'IncidentE': inces, 'AziE': azies, 'Year': eve.origins[0].time.year, 'Jday': eve.origins[0].time.julday, 'PCA': angle, 'Lambda': +scale, 'Ray_Param': arrival.ray_param_sec_degree }) return mess
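# --- Usage sketch (not part of the original module): getangles relies on module-level
# 'client' (FDSN) and 'model' (TauPyModel) objects; the data centre, model name, station
# code and time window below are placeholders.
from obspy import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.taup import TauPyModel

client = Client("IRIS")
model = TauPyModel(model="iasp91")
mess = getangles('IU', 'ANMO', '00', UTCDateTime('2019-01-01'), UTCDateTime('2019-06-01'))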
for ii in range(N):
    tp = np.zeros((Neq, Nstations))
    ts = np.zeros_like(tp)
    so = np.zeros_like(tp)
    #inputs = [input_list for x in range(MC)]
    #num_cores = multiprocessing.cpu_count()-2
    #results = Parallel(n_jobs=num_cores)(delayed(GetRayTracingPar)(i) for i in inputs)
    for eq_index, rowEq in eqdf.iterrows():
        eq_coords = np.array([rowEq.x, rowEq.y, rowEq.z])
        for st_index, rowSt in stdf.iterrows():
            # Get the time for P,S arrivals and offset
            offset = np.sqrt((rowSt.x - eq_coords[0])**2 + (rowSt.y - eq_coords[1])**2) / 1000.0
            arrivals = model.get_travel_times(source_depth_in_km=eq_coords[2] / 1000.0,
                                              distance_in_degree=kilometer2degrees(offset),
                                              receiver_depth_in_km=0.01, phase_list=['p'])
            p = arrivals[0].time
            tp[eq_index, st_index] = p
            #print ' Done with station %d and eq %d ' % (st_index, eq_index)
    print ' Done with iter %d ' % (ii)

t1 = time.time()
total = t1 - t0
print 'Total time is %3.6f s' % total
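# --- Minimal sketch (not from the original source) of the travel-time call used in the loop
# above: a horizontal offset in km is converted to degrees for TauP. The model name and the
# numeric values are assumptions.
from obspy.taup import TauPyModel
from obspy.geodetics import kilometer2degrees

taup_model = TauPyModel(model="iasp91")
offset_km = 12.5                       # horizontal source-receiver offset (km)
arrivals = taup_model.get_travel_times(source_depth_in_km=5.0,
                                       distance_in_degree=kilometer2degrees(offset_km),
                                       receiver_depth_in_km=0.01, phase_list=['p'])
print('direct P arrival: %.3f s' % arrivals[0].time)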
nzyear = sac.stats.sac.nzyear
nzjday = sac.stats.sac.nzjday
nzhour = sac.stats.sac.nzhour
nzmin = sac.stats.sac.nzmin

# making strings
nzyear_str = str(nzyear)
nzjday_str = str(nzjday)
nzhour_str = str(nzhour)
nzmin_str = str(nzmin)

# the distance stored in the data is sometimes wrong, so recompute it when it looks suspect
gcarc = sac.stats.sac.gcarc
if gcarc >= 180 or gcarc <= 10:
    distance_m = gps2dist_azimuth(stla, stlo, evla, evlo)
    distance_km = (distance_m[0]) / 1000
    gcarc = kilometer2degrees(distance_km)
else:
    pass

kevnm = sac.stats.sac.kevnm

# date format expected by the inversion program
date_format = nzyear_str[-2:] + nzjday_str.zfill(3) + nzhour_str.zfill(2) + \
    nzmin_str.zfill(2) + phase_sufix

# compute t0 and the take-off angle (toa)
try:
    tt = getTravelTimes(delta=gcarc, depth=evdp, model=model)
    taup_time = tt[0].get("time")
    toa = tt[0].get("take-off angle")
except:
def main(args): random.seed(datetime.now()) if args.n_distances < 1: args.n_distances = None # print distance classifications if args.n_distances != None: print 'dist_class, dist_deg, dist_km' for dclass in range(0, args.n_distances, 1): dist_deg = util.classification2distance(dclass, args.n_distances) dist_km = geo.degrees2kilometers(dist_deg) print "{} {:.2f} {:.1f}".format(dclass, dist_deg, dist_km) print '' if args.n_magnitudes < 1: args.n_magnitudes = None # print magtitude classifications if args.n_magnitudes != None: print 'mag_class, mag' for mclass in range(0, args.n_magnitudes, 1): mag = util.classification2magnitude(mclass, args.n_magnitudes) print "{} {:.2f}".format(mclass, mag) print '' if args.n_depths < 1: args.n_depths = None # print depth classifications if args.n_depths != None: print 'depth_class, depth' for dclass in range(0, args.n_depths, 1): depth = util.classification2depth(dclass, args.n_depths) print "{} {:.1f}".format(dclass, depth) print '' if args.n_azimuths < 1: args.n_azimuths = None # print azimuth classifications if args.n_azimuths != None: print 'azimuth_class, azimuth' for aclass in range(0, args.n_azimuths, 1): azimuth = util.classification2azimuth(aclass, args.n_azimuths) print "{} {:.1f}".format(aclass, azimuth) print '' if not os.path.exists(args.outpath): os.makedirs(args.outpath) # save arguments with open(os.path.join(args.outpath, 'params.pkl'), 'w') as file: file.write(pickle.dumps(args)) # use `pickle.loads` to do the reverse for dataset in ['train', 'validate', 'test']: for datatype in ['events', 'noise']: datapath = os.path.join(args.outpath, dataset, datatype) if not os.path.exists(datapath): os.makedirs(datapath) mseedpath = os.path.join(datapath, 'mseed') if not os.path.exists(mseedpath): os.makedirs(mseedpath) mseedpath = os.path.join(datapath, 'mseed_raw') if not os.path.exists(mseedpath): os.makedirs(mseedpath) if datatype == 'events': xmlpath = os.path.join(datapath, 'xml') if not os.path.exists(xmlpath): os.makedirs(xmlpath) # read catalog of events #filenames = args.event_files_path + os.sep + '*.xml' catalog_dict = {} catalog_all = [] for dirpath, dirnames, filenames in os.walk(args.event_files_path): for name in filenames: if name.endswith(".xml"): file = os.path.join(dirpath, name) catalog = read_events(file) target_count = int(args.event_fraction * float(catalog.count())) print catalog.count(), 'events:', 'read from:', file, 'will use:', target_count, 'since args.event_fraction=', args.event_fraction if (args.event_fraction < 1.0): while catalog.count() > target_count: del catalog[random.randint(0, catalog.count() - 1)] if not args.systematic: tokens = name.split('_') net_sta = tokens[0] + '_' + tokens[1] if not net_sta in catalog_dict: catalog_dict[net_sta] = catalog else: catalog_dict[net_sta] += catalog # sort catalog by date catalog_dict[net_sta] = Catalog(sorted(catalog_dict[net_sta], key=lambda e: e.origins[0].time)) else: catalog_all += catalog # read list of channels to use inventory_full = read_inventory(args.channel_file) inventory_full = inventory_full.select(channel=args.channel_prefix+'Z', sampling_rate=args.sampling_rate) #print(inventory) client = fdsn.Client(args.base_url) # get existing already processed event channel dictionary try: with open(os.path.join(args.outpath, 'event_channel_dict.pkl'), 'r') as file: event_channel_dict = pickle.load(file) except IOError: event_channel_dict = {} print 'Existing event_channel_dict size:', len(event_channel_dict) n_noise = int(0.5 + float(args.n_streams) * 
args.noise_fraction) n_events = args.n_streams - n_noise n_validate = int(0.5 + float(n_events) * args.validation_fraction) n_test = int(0.5 + float(n_events) * args.test_fraction) n_train = n_events - n_validate - n_test n_count = 0; n_streams = 0 if args.systematic: event_ndx = 0 net_ndx = 0 sta_ndx = 0 channel_ndx = -1 # distance_id_count = {} # max_num_for_distance_id = {} # if args.n_distances != None: # # train # distance_id_count['train'] = [0] * args.n_distances # max_num_for_distance_id['train'] = 1 + int(2.0 * float(n_train) / float(args.n_distances)) # print 'Maximum number events for each distance bin train:', max_num_for_distance_id['train'] # # validate # distance_id_count['validate'] = [0] * args.n_distances # max_num_for_distance_id['validate'] = 1 + int(2.0 * float(n_validate) / float(args.n_distances)) # print 'Maximum number events for each distance bin validate:', max_num_for_distance_id['validate'] # # test # distance_id_count['test'] = [0] * args.n_distances # max_num_for_distance_id['test'] = 1 + int(2.0 * float(n_test) / float(args.n_distances)) # print 'Maximum number events for each distance bin test:', max_num_for_distance_id['test'] while args.systematic or n_streams < args.n_streams: try: # choose event or noise is_noise = n_streams >= n_events # reset validate test count if switching from event to noise if n_streams == n_events: n_validate = int(0.5 + float(n_noise) * args.validation_fraction) n_test = int(0.5 + float(n_noise) * args.test_fraction) n_train = n_noise - n_validate - n_test n_count = 0; # set out paths if is_noise: datatype = 'noise' else: datatype = 'events' if n_count < n_train: dataset = 'train' elif n_count < n_train + n_validate: dataset = 'validate' else: dataset = 'test' datapath = os.path.join(args.outpath, dataset, datatype) # get random channel from Inventory #inventory = inventory_full.select(time=origin.time) inventory = inventory_full if args.systematic: try: catalog, event_ndx, event, origin, channel, net_ndx, net, sta_ndx, sta, channel_ndx \ = get_systematic_channel(inventory, catalog_all, is_noise, event_ndx, net_ndx, sta_ndx, channel_ndx) except ValueError: break else: try: catalog, event_ndx, event, origin, channel, net_ndx, net, sta_ndx, sta, channel_ndx = get_random_channel(inventory, catalog_dict, is_noise) except ValueError: continue distance_id = 0 distance = -999.0 magnitude = -999.0 depth = -999.0 azimuth = -999.0 if not is_noise: dist_meters, azim, bazim = geo.gps2dist_azimuth(channel.latitude, channel.longitude, origin.latitude, origin.longitude, a=geo.WGS84_A, f=geo.WGS84_F) distance = geo.kilometer2degrees(dist_meters / 1000.0, radius=6371) azimuth = azim magnitude = event.preferred_magnitude().mag depth = origin.depth / 1000.0 if args.n_distances != None: distance_id = util.distance2classification(distance, args.n_distances) # if distance_id_count[dataset][distance_id] >= max_num_for_distance_id[dataset]: # print 'Skipping event_channel: distance bin', distance_id, 'for', dataset, 'already full:', \ # distance_id_count[dataset][distance_id], '/', max_num_for_distance_id[dataset] # continue print '' print 'Event:', origin.time.isoformat(), event.event_descriptions[0].text, \ ', Dist(deg): {:.2f} Dist(km): {:.1f} ID: {}'.format(distance, geo.degrees2kilometers(distance), distance_id), \ ', Mag: {:.2f}'.format(magnitude), \ ', Depth(km): {:.1f}'.format(depth), \ ', Az(deg): {:.1f}'.format(azimuth) print 'Retrieving channels:', (n_streams + 1), '/ ', args.n_streams, (', NOISE, ' if is_noise else ', EVENT, '), 'event', 
event_ndx, origin.time, \ ', net', net_ndx, ', sta', sta_ndx, ', chan', channel_ndx, \ ', ', net.code, sta.code, \ channel.code, channel.location_code, \ channel.sample_rate # check station was available at origin.time if not sta.is_active(time=origin.time): print 'Skipping event_channel: station not active at origin.time:' continue #key = str(event_ndx) + '_' + str(net_ndx) + '_' + str(sta_ndx) + '_' + str(channel_ndx) + '_' + str(is_noise) key = str(event_ndx) + '_' + net.code + '_' + sta.code + '_' + channel.code + '_' + str(is_noise) if key in event_channel_dict: print 'Skipping event_channel: already processed.' continue event_channel_dict[key] = 1 # get start time for waveform request ttime = get_first_P_travel_time(origin, channel) arrival_time = origin.time + ttime if is_noise: # get start time of next event event2 = catalog[event_ndx + 1] origin2 = event2.preferred_origin() # check that origins are at least min time apart if origin2.time - origin.time < MIN_INTER_EVENT_TIME: print 'Skipping noise event_channel: inter event time too small: ', str(origin2.time - origin.time), \ origin2.time, origin.time continue ttime2 = get_first_P_travel_time(origin2, channel) arrival_time2 = origin2.time + ttime2 arrival_time = (arrival_time + ((arrival_time2 - arrival_time) / 2.0)) - args.window_start start_time = arrival_time - args.window_start # request data for 3 channels #for orientation in ['Z', 'N', 'E', '1', '2']: # req_chan = args.channel_prefix + orientation channel_name = net.code + '_' + sta.code + '_' + channel.location_code + '_' + args.channel_prefix padded_start_time = start_time - WINDOW_PADDING_FDSN padded_end_time = start_time + args.window_length + 2.0 * WINDOW_PADDING_FDSN chan_param = args.channel_prefix + '?' # kluge to get url used for data request kwargs = {'network': net.code, 'station': sta.code, 'location': channel.location_code, 'channel': chan_param, 'starttime': padded_start_time, 'endtime': padded_end_time} #url = client._create_url_from_parameters('dataselect', DEFAULT_PARAMETERS['dataselect'], **kwargs) url = fdsn.client.build_url(client.base_url, 'dataselect', client.major_versions['dataselect'], "query", parameters=kwargs) print ' java net.alomax.seisgram2k.SeisGram2K', '\"', url, '\"' try: stream = client.get_waveforms( \ net.code, sta.code, channel.location_code, chan_param, \ padded_start_time, padded_end_time, \ attach_response=True) except fdsn.header.FDSNException as ex: print 'Skipping channel:', channel_name, 'FDSNException:', ex, continue print stream # TEST # for trace in stream: # print '==========> trace.stats', trace.stats # check some things if (len(stream) != 3): print 'Skipping channel: len(stream) != 3:', channel_name continue ntrace = 0 for trace in stream: if (len(trace) < 1): print 'Skipping trace: len(trace) < 1:', channel_name continue if (trace.stats.starttime > start_time or trace.stats.endtime < start_time + args.window_length): print 'Skipping trace: does not contain required time window:', channel_name continue ntrace += 1 if (ntrace != 3): print 'Skipping channel: ntrace != 3:', channel_name continue # pre-process streams # sort so that channels will be ingested in NN always in same order ENZ stream.sort(['channel']) # detrend - this is meant to be equivalent to detrend or a long period low-pass (e.g. 
at 100sec) applied to real-time data stream.detrend(type='linear') for trace in stream: # correct for required sampling rate if abs(trace.stats.sampling_rate - args.sampling_rate) / args.sampling_rate > 0.01: trace.resample(args.sampling_rate) # apply high-pass filter if requested if args.hp_filter_freq > 0.0: stream.filter('highpass', freq=args.hp_filter_freq, corners=args.hp_filter_corners) # check signal to noise ratio, if fail, repeat on 1sec hp data to capture local/regional events in longer period microseismic noise sn_type = 'BRB' first_pass = True; while True: if is_noise: snrOK = True else: snrOK = False for trace in stream: # slice with 1sec margin of error for arrival time to: 1) avoid increasing noise amplitude with signal, 2) avoid missing first P in signal if (first_pass): signal_slice = trace.slice(starttime=arrival_time - 1.0, endtime=arrival_time - 1.0 + args.snr_window_length) noise_slice = trace.slice(endtime=arrival_time - 1.0) else: # highpass at 1sec filt_trace = trace.copy() filt_trace.filter('highpass', freq=1.0, corners=4) signal_slice = filt_trace.slice(starttime=arrival_time - 1.0, endtime=arrival_time - 1.0 + args.snr_window_length) noise_slice = filt_trace.slice(endtime=arrival_time - 1.0) sn_type = '1HzHP' # check signal to noise around arrival_time # ratio of std asignal = signal_slice.std() anoise = noise_slice.std() snr = asignal / anoise print trace.id, sn_type, 'snr:', snr, 'std_signal:', asignal, 'std_noise:', anoise # ratio of peak amplitudes (DO NOT USE, GIVE UNSTABLE RESULTS!) # asignal = signal_slice.max() # anoise = noise_slice.max() # snr = np.absolute(asignal / anoise) # print trace.id, sn_type, 'snr:', snr, 'amax_signal:', asignal, 'amax_noise:', anoise if is_noise: snrOK = snrOK and snr <= MAX_SNR_NOISE if not snrOK: break else: snrOK = snrOK or snr >= args.snr_accept if (first_pass and not snrOK and args.hp_filter_freq < 0.0): first_pass = False; continue else: break if (not snrOK): if is_noise: print 'Skipping channel:', sn_type, 'snr >', MAX_SNR_NOISE, 'on one or more traces:', channel_name else: print 'Skipping channel:', sn_type, 'snr < args.snr_accept:', args.snr_accept, 'on all traces:', channel_name continue # trim data to required window # try to make sure samples and start/end times align as closely as possible to first trace trace = stream.traces[0] trace = trace.slice(starttime=start_time, endtime=start_time + args.window_length, nearest_sample=True) start_time = trace.stats.starttime stream = stream.slice(starttime=start_time, endtime=start_time + args.window_length, nearest_sample=True) cstart_time = '%04d.%02d.%02d.%02d.%02d.%02d.%03d' % \ (start_time.year, start_time.month, start_time.day, start_time.hour, start_time.minute, \ start_time.second, start_time.microsecond // 1000) # process each trace try: for trace in stream: # correct for overall sensitivity or gain trace.normalize(trace.stats.response.instrument_sensitivity.value) trace.data = trace.data.astype(np.float32) # write miniseed #tracefile = os.path.join(datapath, 'mseed', trace.id + '.' + cstart_time + '.mseed') #trace.write(tracefile, format='MSEED', encoding='FLOAT32') #print 'Channel written:', tracefile, trace.count(), 'samples' except AttributeError as err: print 'Skipping channel:', channel_name, ': Error applying trace.normalize():' , err filename_root = channel_name + '.' 
+ cstart_time # write raw miniseed streamfile = os.path.join(datapath, 'mseed_raw', filename_root + '.mseed') stream.write(streamfile, format='MSEED', encoding='FLOAT32') print 'Stream written:', stream.count(), 'traces:' print ' java net.alomax.seisgram2k.SeisGram2K', streamfile # store absolute maximum stream_max = np.absolute(stream.max()).max() # normalize by absolute maximum stream.normalize(global_max = True) # 20180521 AJL # spherical coordinates # raw data always in same order ENZ # tensor indexing is [traces, datapoints, comps] if args.spherical: rad2deg = 180.0 / math.pi # calculate modulus temp_square = np.add(np.square(stream.traces[0].data), np.add(np.square(stream.traces[1].data), np.square(stream.traces[2].data))) temp_modulus = np.sqrt(temp_square) # calculate azimuth temp_azimuth = np.add( np.multiply(np.arctan2(stream.traces[0].data, stream.traces[1].data), rad2deg), 180.0) # calculate inclination temp_inclination = np.multiply(np.arcsin(np.divide(stream.traces[2].data, temp_modulus)), rad2deg) # reset stream data to spherical coordinates stream.traces[0].data = temp_inclination stream.traces[1].data = temp_azimuth temp_modulus = np.multiply(temp_modulus, 100.0) # increase scale for plotting purposes stream.traces[2].data = temp_modulus # put absolute maximum normalization in first element of data array, to seed NN magnitude estimation # 20180816 AJL - do not mix max with data # for trace in stream: # trace.data[0] = stream_max print 'stream_max', stream_max # write processed miniseed streamfile = os.path.join(datapath, 'mseed', filename_root + '.mseed') stream.write(streamfile, format='MSEED', encoding='FLOAT32') print 'Stream written:', stream.count(), 'traces:' print ' java net.alomax.seisgram2k.SeisGram2K', streamfile # write event waveforms and distance_id in .tfrecords magnitude_id = 0 depth_id = 0 azimuth_id = 0 if not is_noise: # if args.n_distances != None: # distance_id_count[dataset][distance_id] += 1 if args.n_magnitudes != None: magnitude_id = util.magntiude2classification(magnitude, args.n_magnitudes) if args.n_depths != None: depth_id = util.depth2classification(depth, args.n_depths) if args.n_azimuths != None: azimuth_id = util.azimuth2classification(azimuth, args.n_azimuths) else: distance_id = -1 distance = 0.0 output_name = filename_root + '.tfrecords' output_path = os.path.join(datapath, output_name) writer = DataWriter(output_path) writer.write(stream, stream_max, distance_id, magnitude_id, depth_id, azimuth_id, distance, magnitude, depth, azimuth) if not is_noise: print '==== Event stream tfrecords written:', output_name, \ 'Dist(deg): {:.2f} Dist(km): {:.1f} ID: {}'.format(distance, geo.degrees2kilometers(distance), distance_id), \ ', Mag: {:.2f} ID: {}'.format(magnitude, magnitude_id), \ ', Depth(km): {:.1f} ID: {}'.format(depth, depth_id), \ ', Az(deg): {:.1f} ID: {}'.format(azimuth, azimuth_id) else: print '==== Noise stream tfrecords written:', output_name, 'ID: Dist {}, Mag {}, Depth {}, Az {}'.format(distance_id, magnitude_id, depth_id, azimuth_id) # write event data if not is_noise: filename = os.path.join(datapath, 'xml', filename_root + '.xml') event.write(filename, 'QUAKEML') n_streams += 1 n_count += 1 except KeyboardInterrupt: print 'Stopping: KeyboardInterrupt' break except Exception as ex: print 'Skipping stream: Exception:', ex traceback.print_exc() continue print n_streams, 'streams:', 'written to:', args.outpath # save event_channel_dict with open(os.path.join(args.outpath, 'event_channel_dict.pkl'), 'w') as file: 
file.write(pickle.dumps(event_channel_dict))
def getangles(net, sta, loc, stime, etime, debug=True): # Input net, station, location, starttime, endtime # grab the station info, then the events # We loop through the events and do the calculations on each of the events for the station # we append the results to the mess file and finally print it out mess = [] # Get the latitude and longitude inv = client.get_stations(network=net, station=sta, channel='BH*', level="response", location=loc, starttime=stime, endtime=etime) stalat = inv[0][0][0].latitude stalon = inv[0][0][0].longitude staele = inv[0][0][0].elevation if debug: print('Station lat:' + str(stalat)) print('Station lon:' + str(stalon)) print('Station ele:' + str(staele)) # Get our list of events cat = client.get_events(starttime=stime, minmagnitude=6., latitude=stalat, longitude=stalon, maxradius=90., minradius=30., endtime=etime, mindepth=30) for idx, eve in enumerate(cat): print('On event: ' + str(idx + 1) + ' of ' + str(len(cat))) (dis, bazi, azi) = gps2dist_azimuth(stalat, stalon, eve.origins[0].latitude, eve.origins[0].longitude) # bazi is the station to event azimuth (back azimuth) # azi is the azimuth from the event to the station if debug: print('Here is the back-azimuth: ' + str(bazi)) print('Here is the azimuth: ' + str(azi)) dis = kilometer2degrees(dis / 1000.) arrivals = model.get_travel_times( source_depth_in_km=eve.origins[0].depth / 1000., distance_in_degree=dis, phase_list=['P', 'S']) # If we have multiple phase skip to the next event if len(arrivals) != 2: continue if debug: print(arrivals) print(eve) # We need to also get a Noise window phasestime = (eve.origins[0].time + arrivals[0].time) - 10. phaseetime = (eve.origins[0].time + arrivals[0].time) - 5. try: st = client.get_waveforms(net, sta, loc, 'BH*', phasestime, phaseetime) except: continue st.detrend('constant') st.remove_sensitivity(inventory=inv) if debug: print(phasestime) print(phaseetime) # from here we can estimate the SNR noise = sum(st.std()) if debug: print('Here is the noise:' + str(noise)) # So now we have two events one P and one S for arrival in arrivals: phasestime = (eve.origins[0].time + arrival.time) + 0. phaseetime = (eve.origins[0].time + arrival.time) + 5. if debug: print(phasestime) print(phaseetime) try: st = client.get_waveforms(net, sta, loc, 'BH*', phasestime, phaseetime) except: continue st.detrend('constant') #st.detrend('linear') #st.merge(fill_value=0) st.remove_sensitivity(inventory=inv) #st.taper(0.05) st.rotate(method="->ZNE", inventory=inv) signal = sum(st.std()) if debug: print('Here is the signal:' + str(signal)) print(st) print('Here is the SNR:' + str(signal / noise)) azies, inces, aziesE, incesE = particle_motion_odr(st) if debug: print('Here is the azies: ' + str(azies) + ' +/-' + str(aziesE)) print('Here is the inces: ' + str(inces) + ' +/-' + str(inces)) st.rotate(method="NE->RT", back_azimuth=bazi) angle, scale = decomppca(st, arrival.name) if debug: print('Here is PCA: ' + str(angle) + ' Here is odr:' + str(azies)) if debug: print('Here is the azies:' + str(azies) + ' here is azimuth:' + str(azi)) print('Here is the inces:' + str(inces) + ' here is incident:' + str(arrival.incident_angle)) print(arrival.name) mess.append({ 'SNR': signal / noise, 'azi': azi, 'Phase': arrival.name, 'Incident': arrival.incident_angle, 'IncidentE': inces, 'AziE': azies, 'Year': eve.origins[0].time.year, 'Jday': eve.origins[0].time.julday, 'PCA': angle, 'Lambda': +scale, 'Ray_Param': arrival.ray_param_sec_degree }) return mess
def pretty_plot(st, stack, eve, not_used, comp, inv, paramdic, debug=False):
    st2 = st.select(component=comp)
    diss = []
    # compute distances
    for tr in st2:
        coors = inv.get_coordinates(tr.id[:-1] + 'Z')
        (dis, azi, bazi) = gps2dist_azimuth(coors['latitude'], coors['longitude'],
                                            eve.origins[0].latitude,
                                            eve.origins[0].longitude)
        disdeg = kilometer2degrees(dis / 1000.)
        diss.append(disdeg)
    if debug:
        print(diss)
    mdiss = min(diss)
    Mdiss = max(diss)
    ptp = np.ptp(stack)
    ran = 0.3 * (Mdiss - mdiss) * ptp
    fig = plt.figure(1, figsize=(12, 12))
    tithand = st[0].stats.network + ' ' + paramdic['phase'] + '-Wave '
    if comp == 'R':
        tithand += ' Radial '
    elif comp == 'Z':
        tithand += ' Vertical '
    elif comp == 'T':
        tithand += ' Transverse '
    tithand += str(eve['origins'][0]['time'].year) + ' '
    tithand += str(eve['origins'][0]['time'].julday) + ' '
    tithand += str(eve['origins'][0]['time'].hour).zfill(2) + ':' + \
        str(eve['origins'][0]['time'].minute).zfill(2)
    mag = eve.magnitudes[0].mag
    magstr = eve.magnitudes[0].magnitude_type
    if 'Lg' in magstr:
        magstr = 'mb_{Lg}'
    gmax, gmin = -100., 500.
    tithand += ' $' + magstr + '$=' + str(mag)
    plt.title(tithand)
    for pair in zip(diss, st2):
        t = pair[1].times()
        if max(pair[1].data / ran + pair[0]) > gmax:
            gmax = max(pair[1].data / ran + pair[0])
        if min(pair[1].data / ran + pair[0]) < gmin:
            gmin = min(pair[1].data / ran + pair[0])
        p = plt.plot(t, pair[1].data / ran + pair[0])
        plt.text(min(t) + 1., pair[0] - 0.2,
                 (pair[1].id)[:-4].replace('.', ' '),
                 color=p[0].get_color())
        plt.plot(t, stack / ran + pair[0], color='k', alpha=0.5, linewidth=3)
    plt.plot([10., 10.], [0., 2 * Mdiss + ran], color='k', linewidth=3)
    plt.ylim((gmin - 0.02 * gmin, gmax + 0.02 * gmax))
    plt.xlim((min(t), max(t)))
    plt.xlabel('Time (s)')
    plt.ylabel('Distance (deg)')
    if not os.path.exists(st[0].stats.network + '_results'):
        os.mkdir(st[0].stats.network + '_results')
    plt.savefig(st[0].stats.network + '_results/' + st[0].stats.network + '_' +
                comp + '_' + str(eve['origins'][0]['time'].year) +
                str(eve['origins'][0]['time'].julday) + '_' +
                str(eve['origins'][0]['time'].hour).zfill(2) +
                str(eve['origins'][0]['time'].minute).zfill(2) + '.png',
                format='PNG', dpi=400)
    # plt.show()
    plt.clf()
    plt.close()
    return
def get_domain(lat_source, lon_source, lat_max_in_, lat_min_in_,
               lon_max_in_, lon_min_in_, dimension, nkxky=2**6):
    if (type(nkxky) == int or (type(nkxky) == list and len(nkxky) == 1)):
        nkxkyDifferent = False
        nkx = nkxky
        del nkxky
    elif (type(nkxky) == list and len(nkxky) == 2):
        if (dimension == 2):
            raise ValueError(
                '[%s] Cannot put two values for nkxky when dimension = 2. '
                'Specify only one number, for the one horizontal dimension.'
                % (sys._getframe().f_code.co_name))
        else:
            nkxkyDifferent = True
            nkx = nkxky[0]
            nky = nkxky[1]
            del nkxky
    else:
        raise ValueError(
            '[%s] nkxky must be either an integer, a list of length 1, or a list of length 2.'
            % (sys._getframe().f_code.co_name))

    lat_max_in = lat_max_in_
    lat_min_in = lat_min_in_
    lon_max_in = lon_max_in_
    lon_min_in = lon_min_in_

    # # Check input.
    # if(abs(lat_min_in-lat_max_in) < 1e-3):
    #     # If range is too small, lower lower bound.
    #     lat_min_in -= 0.1
    # if(abs(lon_min_in-lon_max_in) < 1e-3):
    #     # If range is too small, lower lower bound.
    #     lon_min_in -= 0.1
    # # Check input.
    # diff_y = abs(lat_max_in_ - lat_min_in_)
    # diff_x = abs(lon_max_in_ - lon_min_in_)
    # if diff_y < 0.25:
    #     # If range is too small, increase symmetrically.
    #     lat_max_in = lat_max_in_ + diff_y/2.
    #     lat_min_in = lat_min_in_ - diff_y/2.
    # if diff_x < 0.25:
    #     # If range is too small, increase symmetrically.
    #     lon_max_in = lon_max_in_ + diff_x/2.
    #     lon_min_in = lon_min_in_ - diff_x/2.
    # dlon, dlat = abs(lon_max_in-lon_min_in)/dchosen, abs(lat_max_in-lat_min_in)/dchosen

    # Cast lat/lon min/max in meters relative to the source.
    # lat_max, lat_min = degrees2kilometers(lat_max_in)*1000., degrees2kilometers(lat_min_in)*1000.
    # lon_max, lon_min = degrees2kilometers(lon_max_in)*1000., degrees2kilometers(lon_min_in)*1000.
    # lat_min = rwau.haversine(lon_source, lat_source, lon_source, lat_source+lat_min_in)[0][0]*1e3 * np.sign(lat_min_in)
    # lat_max = rwau.haversine(lon_source, lat_source, lon_source, lat_source+lat_max_in)[0][0]*1e3 * np.sign(lat_max_in)
    # lon_min = rwau.haversine(lon_source, lat_source, lon_source+lon_min_in, lat_source)[0][0]*1e3 * np.sign(lon_min_in)
    # lon_max = rwau.haversine(lon_source, lat_source, lon_source+lon_max_in, lat_source)[0][0]*1e3 * np.sign(lon_max_in)
    lat_min = gps2dist_azimuth(lat_source, lon_source,
                               lat_source + lat_min_in, lon_source)[0] * np.sign(lat_min_in)
    lat_max = gps2dist_azimuth(lat_source, lon_source,
                               lat_source + lat_max_in, lon_source)[0] * np.sign(lat_max_in)
    lon_min = gps2dist_azimuth(lat_source, lon_source,
                               lat_source, lon_source + lon_min_in)[0] * np.sign(lon_min_in)
    lon_max = gps2dist_azimuth(lat_source, lon_source,
                               lat_source, lon_source + lon_max_in)[0] * np.sign(lon_max_in)

    # Compute dx, dy from the chosen nkxky.
    # dx, dy, dz = abs(lon_max-lon_min)/dchosen, abs(lat_max-lat_min)/dchosen, 200.
    if (nkxkyDifferent):
        dx, dy = abs(lon_max - lon_min) / nkx, abs(lat_max - lat_min) / nky
    else:
        dx, dy = abs(lon_max - lon_min) / nkx, abs(lat_max - lat_min) / nkx

    # # Add a safety margin.
    # factor = 0   # Margin in number of elements to be added to either side of the domain.
    # dshift = 0.  # Margin in m to be added to either side of the domain.
    # xmin, xmax = lon_min - factor*dy - dshift, lon_max + factor*dy + dshift
    # ymin, ymax = lat_min - factor*dx - dshift, lat_max + factor*dx + dshift
    # # zmax = 30000.
    xmin, xmax = lon_min, lon_max
    ymin, ymax = lat_min, lat_max

    # # Transform domain to make x a power of two.
    # xmin_, xmax_, dx_ = transform_domain_power2(xmin, xmax, dx)
    # xmin, xmax, dx = xmin_, xmax_, dx_

    # Check y span (only if using 3D).
    if (dimension == 3):
        if (abs(dy) < 1e-5):
            # dy = (ymax-ymin)/10  ## DEFAULT VALUE
            raise ValueError('[%s] y span is too small.'
                             % (sys._getframe().f_code.co_name))
        # # Transform domain to make y a power of two.
        # ymin_, ymax_, dy_ = transform_domain_power2(ymin, ymax, dy)
        # ymin, ymax, dy = ymin_, ymax_, dy_
        # # Make mid point exactly zero.
        # yy = np.arange(ymin, ymax, dy)
        # ymin = yy[0]
        # ymax = yy[-1]
        # loc_ = np.argmin(abs(yy))
        # if(abs(yy[loc_]) < 1e-5):
        #     ymax -= yy[loc_]
        #     ymin -= yy[loc_]
        ## OLD before Jul 13 2020

    if (nkxkyDifferent):
        dx, dy = abs(xmax - xmin) / nkx, abs(ymax - ymin) / nky
    else:
        dx, dy = abs(xmax - xmin) / nkx, abs(ymax - ymin) / nkx

    domain = {}
    domain.update({'origin': (lat_source, lon_source)})
    domain.update({
        'latmin': lat_source + kilometer2degrees(ymin / 1000.),
        'latmax': lat_source + kilometer2degrees(ymax / 1000.)
    })
    domain.update({
        'lonmin': lon_source + kilometer2degrees(xmin / 1000.),
        'lonmax': lon_source + kilometer2degrees(xmax / 1000.)
    })
    domain.update({'xmin': xmin, 'xmax': xmax})
    domain.update({'ymin': ymin, 'ymax': ymax})
    # domain.update({'zmin': 0., 'zmax': zmax})
    # domain.update({'dx': dx, 'dy': dy, 'dz': dz})
    domain.update({'dx': dx, 'dy': dy})
    # (zmin, zmax, dz) should only be defined at the atmospheric model step,
    # when defining the Rayleigh wave field (class field_RW).
    return domain
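

# Hedged usage sketch for get_domain(): the source coordinates and the lat/lon
# window (given in degrees as offsets relative to the source) are placeholder
# values, with different grid counts for x and y in the 3-D case.
domain = get_domain(lat_source=46.0, lon_source=7.0,
                    lat_max_in_=1.0, lat_min_in_=-1.0,
                    lon_max_in_=1.5, lon_min_in_=-1.5,
                    dimension=3, nkxky=[2**7, 2**6])
print(domain['latmin'], domain['latmax'], domain['dx'], domain['dy'])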
def pretty_plot_small(st, stack, eve, not_used, comp, inv, paramdic):
    st2 = st.select(component=comp)
    st2 = st2.copy()
    diss = []
    # compute distances
    for tr in st2:
        coors = inv.get_coordinates(tr.id[:-1] + 'Z')
        (dis, azi, bazi) = gps2dist_azimuth(coors['latitude'], coors['longitude'],
                                            eve.origins[0].latitude,
                                            eve.origins[0].longitude)
        disdeg = kilometer2degrees(dis / 1000.)
        diss.append(disdeg)
    mdiss = min(diss)
    Mdiss = max(diss)
    diss = np.arange(float(len(st2)))
    for tr in st2:
        tr.data /= np.max(np.abs(stack))
    stack /= np.max(np.abs(stack))
    ptp = np.ptp(stack)
    ran = 1.
    fig = plt.figure(1, figsize=(16, 12))
    tithand = st[0].stats.network + ' ' + paramdic['phase'] + '-Wave '
    if comp == 'R':
        tithand += ' Radial '
    elif comp == 'Z':
        tithand += ' Vertical '
    elif comp == 'T':
        tithand += ' Transverse '
    tithand += str(eve['origins'][0]['time'].year) + ' '
    tithand += str(eve['origins'][0]['time'].julday) + ' '
    tithand += str(eve['origins'][0]['time'].hour).zfill(2) + ':' + \
        str(eve['origins'][0]['time'].minute).zfill(2)
    mag = eve.magnitudes[0].mag
    magstr = eve.magnitudes[0].magnitude_type
    if 'Lg' in magstr:
        magstr = 'mb_{Lg}'
    tithand += ' $' + magstr + '$=' + str(mag)
    plt.title(tithand)
    labs = []
    for pair in zip(diss, st2):
        labs.append((pair[1].id).replace('.', ' '))
        t = pair[1].times()
        if pair[1].max() > np.max(np.abs(stack)) * 3.:
            p = plt.plot(t, (pair[1].data) / (np.max(np.abs(stack)) * 3.) + pair[0])
            plt.text(min(t) + 1., pair[0] + .2,
                     (pair[1].id)[:-4].replace('.', ' ') + ' gain',
                     color=p[0].get_color())
        else:
            p = plt.plot(t, pair[1].data / ran + pair[0])
            plt.text(min(t) + 1., pair[0] - 0.2,
                     (pair[1].id)[:-4].replace('.', ' '),
                     color=p[0].get_color())
        plt.plot(t, stack / ran + pair[0], color='k', alpha=0.5, linewidth=3)
    plt.yticks(diss, labs)
    plt.plot([10., 10.], [-1000., 1000.], color='k', linewidth=3)
    plt.ylim((min(diss) - 1, max(diss) + 1))
    plt.xlim((min(t), max(t)))
    plt.xlabel('Time (s)')
    plt.ylabel('Station index')
    if not os.path.exists(st[0].stats.network + '_results'):
        os.mkdir(st[0].stats.network + '_results')
    plt.savefig(st[0].stats.network + '_results/' + st[0].stats.network + '_' +
                comp + '_' + str(eve['origins'][0]['time'].year) +
                str(eve['origins'][0]['time'].julday) + '_' +
                str(eve['origins'][0]['time'].hour).zfill(2) +
                str(eve['origins'][0]['time'].minute).zfill(2) + '.png',
                format='PNG', dpi=400)
    plt.clf()
    plt.close()
    return
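

# Hedged usage sketch shared by pretty_plot() and pretty_plot_small(). The
# stream `st`, event `eve` and inventory `inv` are assumed to have been built
# by a calling script; the stack here is simply the mean of the selected
# component traces, which is one plausible choice rather than the original
# workflow's definition.
stack = np.mean([tr.data for tr in st.select(component='Z')], axis=0)
pretty_plot(st, stack, eve, not_used=[], comp='Z', inv=inv,
            paramdic={'phase': 'P'})
pretty_plot_small(st, stack, eve, not_used=[], comp='Z', inv=inv,
                  paramdic={'phase': 'P'})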
def calculate_ray_coverage(earthquake_list, stations_list, depth_range, phase='S', **kwargs):
    '''
    args:
        earthquake_list: earthquakes file (same format as used in synth tomo)
        stations_list: stations file (same format as used in synth tomo)
        depth_range: tuple (mindepth, maxdepth)

    kwargs:
        savefig: True or False
        fig_name: str, name of figure (defaults to fig.pdf)
        plot_title: str, title at the top of the plot (default no title)
        fout_name: str, output text file for the ray segments
    '''
    # the earthquake list format is: eq num, eq lon, eq lat, eq dep
    # the stations list format is: st lon, st lat
    savefig = kwargs.get('savefig', True)
    fig_name = kwargs.get('fig_name', 'fig.pdf')
    plot_title = kwargs.get('plot_title', 'None')
    fout_name = kwargs.get('fout_name', 'None')

    prem = TauPyModel('prem_50km')
    stations_file = np.loadtxt(stations_list)
    quakes_file = np.loadtxt(earthquake_list)
    n_quakes = len(quakes_file)
    n_stats = len(stations_file)
    st_lons = stations_file[:, 0]
    st_lats = stations_file[:, 1]
    eq_lons = quakes_file[:, 1]
    eq_lats = quakes_file[:, 2]
    eq_deps = quakes_file[:, 3]

    if phase == 'S' or phase == 'P':
        delta_min = 30.0
        delta_max = 100.0
    elif phase == 'SKS':
        delta_min = 70.0
        delta_max = 140.0

    m = Basemap(projection='hammer', lon_0=204)
    fout = open(fout_name, 'w')

    for i in range(0, n_quakes):
        # print('working on earthquake', i)
        for j in range(0, n_stats):
            geodet = gps2dist_azimuth(eq_lats[i], eq_lons[i], st_lats[j], st_lons[j])
            dist_m = geodet[0]
            dist_deg = kilometer2degrees((dist_m / 1000.))
            if dist_deg < delta_min:
                continue
            elif dist_deg > delta_max:
                continue
            az = geodet[1]
            # print('eq_lat, eq_lon, st_lat, st_lon, dist_deg',
            #       eq_lats[i], eq_lons[i], st_lats[j], st_lons[j], dist_deg)
            arrs = prem.get_pierce_points(source_depth_in_km=eq_deps[i],
                                          distance_in_degree=dist_deg,
                                          phase_list=[phase])
            # print(arrs)
            arr = arrs[0]
            pierce_dict = arr.pierce
            # items in pierce_dict: 'p' (slowness), 'time' (time in s),
            # 'dist' (distance in rad), 'depth' (depth in km)
            origin = geopy.Point(eq_lats[i], eq_lons[i])
            bearing = az
            geo_path = []
            cross_pt1 = 0
            cross_pt2 = 0
            dist_max = pierce_dict['dist'][::-1][0]
            for ds in pierce_dict:
                # only add points that are past the turning depth
                dist_here = ds[2]
                if dist_here >= dist_max / 2:
                    time_here = ds[1]
                    depth_here = ds[3]
                    if depth_here == depth_range[1]:
                        dist_deg = np.degrees(ds[2])
                        dist_km = dist_deg * ((2 * np.pi * 6371.0 / 360.0))
                        geo_pt = VincentyDistance(kilometers=dist_km).destination(origin, bearing)
                        lat_pt = geo_pt[0]
                        lon_pt = geo_pt[1]
                        cross_pt1 = (lon_pt, lat_pt)
                    if depth_here == depth_range[0]:
                        dist_deg = np.degrees(ds[2])
                        dist_km = dist_deg * ((2 * np.pi * 6371.0 / 360.0))
                        geo_pt = VincentyDistance(kilometers=dist_km).destination(origin, bearing)
                        lat_pt = geo_pt[0]
                        lon_pt = geo_pt[1]
                        cross_pt2 = (lon_pt, lat_pt)
            if cross_pt1 != 0 and cross_pt2 != 0:
                m.drawgreatcircle(cross_pt1[0], cross_pt1[1], cross_pt2[0], cross_pt2[1],
                                  linewidth=1, alpha=0.15, color='k')
                fout.write('{} {} {} {}'.format(cross_pt1[0], cross_pt1[1],
                                                cross_pt2[0], cross_pt2[1]) + '\n')

    fout.close()
    m.drawcoastlines()
    m.fillcontinents(color='lightgrey')
    # m.drawparallels(np.arange(-90., 120., 30.))
    # m.drawmeridians(np.arange(0., 360., 60.))
    if plot_title != 'None':
        plt.title(plot_title)
    if savefig:
        plt.savefig(fig_name)
        plt.clf()
    else:
        plt.show()
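

# Hedged usage sketch for calculate_ray_coverage(). The file names and the
# depth range are placeholders; the input files are expected in the plain-text
# formats described in the docstring above.
calculate_ray_coverage('earthquakes.txt', 'stations.txt',
                       depth_range=(1000.0, 1800.0), phase='S',
                       savefig=True, fig_name='ray_coverage.pdf',
                       plot_title='S-wave ray coverage',
                       fout_name='ray_segments.txt')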
DB = os.path.dirname(__file__)
PHASEPY = os.path.dirname(DB)
DATA = os.path.join(PHASEPY, 'examples', 'data_20130616153750')
db_tt = os.path.join(DATA, 'tt_stations_1D.db')
db_uri = 'sqlite:///' + db_tt
engine = create_engine(db_uri)
engine_associator = create_engine(db_uri, echo=False)
BaseTT1D.metadata.create_all(engine_associator)
session = sessionmaker(bind=engine_associator)()
model = TauPyModel(model="iasp91")

for k in range(1001, 10001):
    km = k / 2
    print('kilometers: ', km, kilometer2degrees(km))
    p_arrival = model.get_travel_times(source_depth_in_km=5,
                                       distance_in_degree=kilometer2degrees(km),
                                       phase_list=['P'])
    s_arrival = model.get_travel_times(source_depth_in_km=5,
                                       distance_in_degree=kilometer2degrees(km),
                                       phase_list=['S'])
    p = p_arrival[0].time
    s = s_arrival[0].time
    new_tt = TTtable1D(d_km=km, delta=kilometer2degrees(km), p_tt=p, s_tt=s, s_p=s - p)
    session.add(new_tt)
    session.commit()
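

# Hedged sketch: once the travel-time table is populated, it can be read back
# with standard SQLAlchemy queries. This assumes TTtable1D maps the same column
# names used in its constructor above (d_km, delta, p_tt, s_tt, s_p); the
# 150 km lookup distance is only an example value.
row = (session.query(TTtable1D)
       .filter(TTtable1D.d_km >= 150.0)
       .order_by(TTtable1D.d_km)
       .first())
print(row.d_km, row.delta, row.p_tt, row.s_tt, row.s_p)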
st_client = Client('IRIS')
stas = st_client.get_stations(network=args.nets, station=args.stations,
                              level='response', starttime=cat[0].origins[0].time,
                              channel=args.chan, location=args.loc)
model = TauPyModel(model='iasp91')
for net in stas:
    for sta in net:
        (dis, azi, bazi) = gps2dist_azimuth(sta._latitude, sta._longitude,
                                            cat[0].origins[0].latitude,
                                            cat[0].origins[0].longitude)
        deg = kilometer2degrees(dis / 1000.)
        if args.phases:
            arrivals = model.get_travel_times(
                source_depth_in_km=cat[0].origins[0].depth / 1000.,
                distance_in_degree=deg, phase_list=args.phases)
        else:
            arrivals = model.get_travel_times(
                source_depth_in_km=cat[0].origins[0].depth / 1000.,
                distance_in_degree=deg)
        if len(arrivals) > 0:
            print("{}_{} Arrivals\n---------------------".format(net.code, sta.code))
            for arrival in arrivals:
                atime = cat[0].origins[0].time + arrival.time
                timestring = str(atime).split('T')
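                # Hedged sketch of how the truncated loop above might finish:
                # printing the phase name with the date and time parts of the
                # ISO timestamp. This is an assumption about the missing tail
                # of the script, not recovered original code.
                print('{:<8s} {} {}'.format(arrival.name, timestring[0], timestring[1]))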