def ArrayBeamForming(sx, sy, traces, t_i_c, t_e_c, nsta, center, lat, lon,
                     t_o, deltat):
    '''
    Delay-and-sum beamforming over a grid of slownesses (sx, sy), using
    station offsets relative to the center station.
    '''
    dis_x = orthodrome.distance_accurate50m_numpy(
        lat[center], lon, lat[center], lon[center])
    dis_y = orthodrome.distance_accurate50m_numpy(
        lat, lon[center], lat[center], lon[center])

    print("dis_x[center]", dis_x[center])
    dis_x[center] = 0.0
    dis_y[center] = 0.0

    # make offsets signed relative to the center station
    dum_ = np.where(lat > lat[center])
    dis_y[dum_] = -1.0 * dis_y[dum_]
    dum = np.where(lon > lon[center])
    dis_x[dum] = -1.0 * dis_x[dum]

    X = np.array([dis_x, dis_y])
    S = np.transpose([np.tile(sx, np.size(sy)), np.repeat(sy, np.size(sx))])
    delay = np.dot(S, X)

    PP = abeam(traces, delay, t_i_c, t_e_c)
    P = PP.reshape(sx.size, sy.size)
    P = P.transpose()
    return P, delay

def array_beam_s(center, lat, lon, Sx, Sy, traces, dum_tr):
    nsta = np.size(traces)
    delay = np.zeros(nsta)
    traces_d = []
    for st in range(nsta):
        if st != center:
            dis_x = orthodrome.distance_accurate50m_numpy(
                lat[center], lon[st], lat[center], lon[center])
            dis_y = orthodrome.distance_accurate50m_numpy(
                lat[st], lon[center], lat[center], lon[center])
            if lat[st] > lat[center]:
                dis_y = -1.0 * dis_y
            if lon[st] > lon[center]:
                dis_x = -1.0 * dis_x

            delay[st] = dis_x * Sx + dis_y * Sy
        else:
            delay[st] = 0.0

        dd = traces[st].copy()
        dd.shift(delay[st])
        dum_tr.add(dd)
        traces_d.append(dd)

    traces_d.append(dum_tr)
    return traces_d, delay

def testDistanceArrayPythonC(self):
    ntest = 10000
    locs = self.get_critical_random_locations(ntest)
    a = orthodrome.distance_accurate50m_numpy(
        *locs, implementation='python')
    b = orthodrome.distance_accurate50m_numpy(
        *locs, implementation='c')
    num.testing.assert_array_almost_equal(a, b)

def surface_distance(alat, alon, anorth, aeast, blat, blon, bnorth, beast):
    args = float_array_broadcast(
        alat, alon, anorth, aeast, blat, blon, bnorth, beast)

    want_scalar = False
    if len(args[0].shape) == 0:
        want_scalar = True
        args = [num.atleast_1d(x) for x in args]

    (alat, alon, anorth, aeast, blat, blon, bnorth, beast) = args

    eqref = num.logical_and(alat == blat, alon == blon)
    neqref = num.logical_not(eqref)

    dist = num.empty_like(alat)
    dist[eqref] = num.sqrt(
        (bnorth[eqref] - anorth[eqref])**2
        + (beast[eqref] - aeast[eqref])**2)

    aalat, aalon = od.ne_to_latlon(
        alat[neqref], alon[neqref], anorth[neqref], aeast[neqref])
    bblat, bblon = od.ne_to_latlon(
        blat[neqref], blon[neqref], bnorth[neqref], beast[neqref])

    dist[neqref] = od.distance_accurate50m_numpy(aalat, aalon, bblat, bblon)

    if want_scalar:
        return dist[0]
    else:
        return dist

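# A minimal usage sketch for surface_distance above (not part of the
# original snippet). It assumes float_array_broadcast and the num/od
# aliases from the surrounding module are available; all coordinates are
# made up. With identical origins the function takes the Cartesian
# branch, so the first call should give ~5000 m from sqrt(3000**2 +
# 4000**2).
import numpy as num

d = surface_distance(52.0, 13.0, 0.0, 0.0, 52.0, 13.0, 3000.0, 4000.0)
print(d)  # expected: ~5000.0 (meters)

# Array call: several receiver offsets at once.
bnorth = num.array([0.0, 1000.0, 2000.0])
dists = surface_distance(
    52.0, 13.0, 0.0, 0.0, 52.0, 13.0, bnorth, num.zeros(3))
print(dists)
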
def test_midpoint(self):
    center_lons = num.linspace(0., 180., 5)
    center_lats = [0., 89.]
    npoints = 10000
    half_side_length = 1000000.
    distance_error_max = 50000.
    for lat in center_lats:
        for lon in center_lons:
            n = num.random.uniform(
                -half_side_length, half_side_length, npoints)
            e = num.random.uniform(
                -half_side_length, half_side_length, npoints)
            dlats, dlons = orthodrome.ne_to_latlon(lat, lon, n, e)
            clat, clon = orthodrome.geographic_midpoint(dlats, dlons)
            d = orthodrome.distance_accurate50m_numpy(
                clat, clon, lat, lon)[0]

            if plot:
                import matplotlib.pyplot as plt
                fig = plt.figure()
                ax = fig.add_subplot(111)
                ax.scatter(n, e)
                c_n, c_e = orthodrome.latlon_to_ne_numpy(
                    lat, lon, clat, clon)
                ax.plot(c_n, c_e, 'ro')
                plt.show()

            self.assertTrue(
                d < distance_error_max,
                'Distance %s > %s' % (d, distance_error_max) +
                '(maximum error)\n tested lat/lon: %s/%s' % (lat, lon))

def get_distance_hypo(eventi, eventj):
    '''
    Normalized Euclidean hypocentral distance, assuming a flat earth to
    combine epicentral distance and depth difference. The normalization
    assumes that the largest considered distance is 1000 km.
    '''
    maxdist_km = 1000.
    a_lats, a_lons, b_lats, b_lons = \
        eventi.north, eventi.east, eventj.north, eventj.east
    a_dep, b_dep = eventi.down, eventj.down

    if (a_lats == b_lats) and (a_lons == b_lons) and (a_dep == b_dep):
        d = 0.
    else:
        distance_m = orthodrome.distance_accurate50m_numpy(
            a_lats, a_lons, b_lats, b_lons)
        distance_km = distance_m / 1000.
        ddepth = abs(eventi.down - eventj.down)
        hypo_distance_km = math.sqrt(
            distance_km * distance_km + ddepth * ddepth)
        # maxdist = float(inv_param['EUCLIDEAN_MAX'])
        d = hypo_distance_km / maxdist_km

    if d >= 1.:
        d = 1.
    return d

def window(lat, lon, lat_source, lon_source, depth_source, nsta, store):
    center = Array_center(lat, lon)
    source_reciever_dis = orthodrome.distance_accurate50m_numpy(
        lat_source, lon_source, lat, lon)
    t_p = np.array([
        store.t("first(p|P)", (depth_source, int(source_reciever_dis[i])))
        for i in range(0, nsta)])
    t_s = np.array([
        store.t("first(s|S)", (depth_source, int(source_reciever_dis[i])))
        for i in range(0, nsta)])
    t_origin = util.str_to_time('2008-02-17 11:06:01.10')

    def win_(t_, t_l, t_r, center):
        wind_i = t_origin + t_ - t_l
        wind_e = t_origin + t_ + t_r
        t_o = -t_ + t_[center]
        return wind_i, wind_e, t_o

    P_wind_i, P_wind_e, t_op = win_(t_p, 5.0, 20.0, center)
    S_wind_i, S_wind_e, t_os = win_(t_s, 2.0, 18.0, center)
    return P_wind_i, P_wind_e, t_op, center  # , S_wind_i, S_wind_e, t_os

def distance_to(self, source):
    src_lats = num_full_like(self.lats, fill_value=source.lat)
    src_lons = num_full_like(self.lons, fill_value=source.lon)

    target_coords = self.get_latlon()
    target_lats = target_coords[:, 0]
    target_lons = target_coords[:, 1]

    return distance_accurate50m_numpy(
        src_lats, src_lons, target_lats, target_lons)

def distances_to(self, receiver):
    if self.same_origin(receiver):
        return num.sqrt(
            (self.north_shifts - receiver.north_shift)**2
            + (self.east_shifts - receiver.east_shift)**2)
    else:
        slats, slons = self.effective_latlons
        rlat, rlon = receiver.effective_latlon
        return orthodrome.distance_accurate50m_numpy(
            slats, slons, rlat, rlon)

def get_radius(self):
    stations = self.get_stations()
    if not stations:
        return self._parent.get_radius()

    clat, clon = self.get_center_latlon()
    radii = distance_accurate50m_numpy(
        clat, clon,
        [st.effective_lat for st in stations],
        [st.effective_lon for st in stations])

    return float(radii.max())

def get_cities(lat, lon, radius, minpop=0):
    region = od.radius_to_region(lat, lon, radius)
    cities = get_cities_region(region, minpop=minpop)

    clats = num.array([c.lat for c in cities])
    clons = num.array([c.lon for c in cities])

    dists = od.distance_accurate50m_numpy(lat, lon, clats, clons)
    order = num.argsort(dists)
    cities_sorted = [cities[i] for i in order if dists[i] < radius]

    return cities_sorted

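# A minimal usage sketch for get_cities above (not part of the original
# snippet). It assumes get_cities_region is backed by a GeoNames-style
# dataset whose city objects carry a 'name' attribute; the coordinates
# (roughly Berlin) and population cutoff are hypothetical.
cities = get_cities(52.5, 13.4, radius=200e3, minpop=100000)
for c in cities[:5]:
    print(c.name, c.lat, c.lon)  # nearest cities first
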
def getDistance2Event(self, event):
    '''
    :param event: instance of pyrocko.model.Event
    :returns: [float] Distance between given event and self
    '''
    assert isinstance(event, model.Event), \
        'event not instance of model.Event'
    try:
        return orthodrome.distance_accurate50m_numpy(
            event.lat, event.lon, self.latitude, self.longitude)
    except TypeError:
        sys.stdout.write(
            'TypeError. Most likely no latitude/longitude for station: '
            '%s.%s\n' % (self.network, self.station))

def distance_to(self, other):
    '''
    Compute distance [m] to other location object.
    '''
    if self.same_origin(other):
        return math.sqrt(
            (self.north_shift - other.north_shift)**2
            + (self.east_shift - other.east_shift)**2)
    else:
        slat, slon = self.effective_latlon
        rlat, rlon = other.effective_latlon
        return orthodrome.distance_accurate50m_numpy(slat, slon, rlat, rlon)

def test_point_in_polygon(self):
    if plot:
        import matplotlib.pyplot as plt

    for i in range(100):
        if plot:
            plt.clf()
            axes = plt.gca()

        lat0, lon0, radius, circle = random_circle(100)
        if plot:
            print(lat0, lon0, radius)

        lats = num.linspace(-90., 90., 100)
        lons = num.linspace(-180., 180., 200)

        points = num.empty((lons.size * lats.size, 2))
        points[:, 0] = num.repeat(lats, lons.size)
        points[:, 1] = num.tile(lons, lats.size)

        mask = orthodrome.contains_points(circle, points)
        distances = orthodrome.distance_accurate50m_numpy(
            lat0, lon0, points[:, 0], points[:, 1])

        mask2 = distances < radius
        mask3 = num.logical_and(
            num.not_equal(mask2, mask),
            num.abs(distances - radius) > radius / 100.)

        if plot:
            axes.plot(circle[:, 1], circle[:, 0], 'o', ms=1, color='black')
            axes.plot(points[mask, 1], points[mask, 0], 'o', ms=1,
                      alpha=0.2, color='black')
            axes.plot(points[mask3, 1], points[mask3, 0], 'o', ms=1,
                      color='red')
            plt.show()

        assert not num.any(mask3)

def grid_points_in_error_ellipses(lat_ev, lon_ev, depth_ev, error_h,
                                  error_z, path_models=None,
                                  gf_store_id=None):
    pathlist = Path(path_models).glob('model_%s_*' % gf_store_id)
    region = orthodrome.radius_to_region(lat_ev, lon_ev, error_h)
    grid_points = []
    for path in sorted(pathlist):
        path = str(path)
        model_coordinates = path.split("_")
        lat = float(model_coordinates[3])
        lon = float(model_coordinates[4])
        depth = float(model_coordinates[5])
        dists = orthodrome.distance_accurate50m_numpy(
            lat_ev, lon_ev, lat, lon)
        if dists < error_h:
            if depth_ev - error_z < depth and depth_ev + error_z > depth:
                grid_points.append(path)
    return grid_points

def random_conf(Ns):
    lats = np.random.uniform(20, 40, Ns)
    lons = np.random.uniform(20, 40, Ns)
    lats = np.around(lats, decimals=1)
    lons = np.around(lons, decimals=1)
    dis = np.zeros([Ns, Ns])
    for i in range(0, Ns):
        dis[i] = orthodrome.distance_accurate50m_numpy(
            lats, lons, lats[i], lons[i])
    dis /= 1000.0
    dis = np.around(dis, decimals=0)
    iu1 = np.triu_indices(Ns)
    dis[iu1] = 0
    ar_mindis = dis[np.nonzero(dis)].min()
    ar_aperture = dis[np.nonzero(dis)].max()
    return lats, lons, ar_mindis, ar_aperture

def distance_to(self, other):
    '''
    Compute surface distance [m] to other location object.
    '''
    if self.same_origin(other):
        other_north_shift, other_east_shift = get_offset(other)
        return math.sqrt(
            (self.north_shift - other_north_shift)**2
            + (self.east_shift - other_east_shift)**2)
    else:
        slat, slon = self.effective_latlon
        rlat, rlon = get_effective_latlon(other)
        return float(orthodrome.distance_accurate50m_numpy(
            slat, slon, rlat, rlon)[0])

def subset_events_dist_cat(catalog, mag_min, mag_max, tmin, tmax,
                           st_lat, st_lon, dist_min=None, dist_max=None):
    """
    Extract a subset of events from an event catalog.

    :param catalog: event catalog in pyrocko format
    :param mag_min: min. magnitude
    :param mag_max: max. magnitude
    :param tmin: UTC time string ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param tmax: UTC time string ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param st_lat: station latitude
    :param st_lon: station longitude
    :param dist_min: min. distance (km)
    :param dist_max: max. distance (km)
    :returns: list of events
    """
    use_events = []
    events = model.load_events(catalog)
    for ev in events:
        if ev.magnitude < mag_max and \
                ev.magnitude > mag_min and \
                ev.time < util.str_to_time(tmax) and \
                ev.time > util.str_to_time(tmin):
            if dist_min or dist_max:
                dist = orthodrome.distance_accurate50m_numpy(
                    ev.lat, ev.lon, st_lat, st_lon) / 1000.

                if dist_min and dist_max and \
                        dist > dist_min and dist < dist_max:
                    use_events.append(ev)

                if dist_min and not dist_max and dist > dist_min:
                    use_events.append(ev)

                if dist_max and not dist_min and dist < dist_max:
                    use_events.append(ev)

    return use_events

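# A minimal usage sketch for subset_events_dist_cat above (not part of
# the original snippet). The catalog file name and station coordinates
# are hypothetical; it assumes pyrocko's model/util modules are imported
# as in the snippet.
events = subset_events_dist_cat(
    'catalog.pf', mag_min=4.0, mag_max=6.0,
    tmin='2020-01-01 00:00:00.0', tmax='2021-01-01 00:00:00.0',
    st_lat=46.0, st_lon=8.0, dist_min=100., dist_max=1000.)
print('%i events selected' % len(events))
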
def get_center_station(stations, select_closest=False):
    '''Gravitational center of the *stations* list.'''
    n = len(stations)
    slats = num.empty(n)
    slons = num.empty(n)
    for i, s in enumerate(stations):
        slats[i] = s.lat
        slons[i] = s.lon

    center_lat, center_lon = ortho.geographic_midpoint(slats, slons)

    if select_closest:
        center_lats = num.ones(n) * center_lat
        center_lons = num.ones(n) * center_lon
        dists = ortho.distance_accurate50m_numpy(
            center_lats, center_lons, slats, slons)

        return stations[num.argmin(dists)]
    else:
        return model.Station(center_lat, center_lon)

def subset_events_dist_evlist(ev_list, mag_min, mag_max, tmin, tmax,
                              st_lat, st_lon, depth_min, depth_max,
                              dist_min=None, dist_max=None):
    """
    Extract a subset of events from a list of events.

    :param ev_list: list of pyrocko events
    :param mag_min: min. magnitude
    :param mag_max: max. magnitude
    :param tmin: UTC time string ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param tmax: UTC time string ('%Y-%m-%d %H:%M:%S.OPTFRAC')
    :param st_lat: station latitude
    :param st_lon: station longitude
    :param depth_min: min. depth
    :param depth_max: max. depth
    :param dist_min: min. distance (km)
    :param dist_max: max. distance (km)
    :returns: list of events
    """
    use_events = []
    for ev in ev_list:
        if (mag_min < ev.magnitude < mag_max) and \
                (util.str_to_time(tmin) < ev.time
                 < util.str_to_time(tmax)) and \
                (float(depth_min) < ev.depth < float(depth_max)):
            if dist_min or dist_max:
                dist = orthodrome.distance_accurate50m_numpy(
                    ev.lat, ev.lon, st_lat, st_lon) / 1000.

                if dist_min and dist_max and \
                        dist > dist_min and dist < dist_max:
                    use_events.append(ev)

                if dist_min and not dist_max and dist > dist_min:
                    use_events.append(ev)

                if dist_max and not dist_min and dist < dist_max:
                    use_events.append(ev)

    return use_events

def update_distances(self, indices=None):
    '''Calculate and update distances between events.'''
    indices = indices or [[]]
    indices = [i for ii in indices for i in ii]
    if len(indices) != 1 or self.marker_table_view.horizontalHeader()\
            .isSectionHidden(_column_mapping['Dist [km]']):
        return

    if self.last_active_event == self.pile_viewer.get_active_event():
        return
    else:
        self.last_active_event = self.pile_viewer.get_active_event()

    markers = self.pile_viewer.markers
    omarker = markers[indices[0]]
    if not isinstance(omarker, EventMarker):
        return

    emarkers = [m for m in markers if isinstance(m, EventMarker)]
    if len(emarkers) < 2:
        return

    lats = num.zeros(len(emarkers))
    lons = num.zeros(len(emarkers))
    for i in xrange(len(emarkers)):
        lats[i] = emarkers[i].get_event().lat
        lons[i] = emarkers[i].get_event().lon

    olats = num.zeros(len(emarkers))
    olons = num.zeros(len(emarkers))
    olats[:] = omarker.get_event().lat
    olons[:] = omarker.get_event().lon

    dists = orthodrome.distance_accurate50m_numpy(lats, lons, olats, olons)
    dists /= 1000.
    self.distances = dict(zip(emarkers, dists))
    self.marker_table_view.viewport().repaint()
    self.emit(qc.SIGNAL('dataChanged()'))

    if self.marker_table_view.horizontalHeader().sortIndicatorSection() \
            == _column_mapping['Dist [km]']:
        self.sort(_column_mapping['Dist [km]'])

def AperMin(lat, lon):
    '''
    Return the array aperture and the minimum distance between stations.
    Inputs are arrays of station latitudes and longitudes.
    '''
    Ns = len(lat)
    dis = np.zeros([Ns, Ns])
    for i in range(0, Ns):
        # fixed: use the lat/lon arguments (the original referenced
        # undefined names lats/lons)
        dis[i] = orthodrome.distance_accurate50m_numpy(
            lat, lon, lat[i], lon[i])
    dis /= 1000.0
    dis = np.around(dis, decimals=0)
    iu1 = np.triu_indices(Ns)
    dis[iu1] = 0
    ar_mindis = dis[np.nonzero(dis)].min()
    ar_aperture = dis[np.nonzero(dis)].max()
    return ar_mindis, ar_aperture

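# A quick, hedged check of AperMin above (not part of the original
# snippet) with a small synthetic three-station network around 45 N,
# 10 E; the coordinates are made up.
import numpy as np

lat = np.array([45.0, 45.1, 44.9])
lon = np.array([10.0, 10.1, 9.95])

ar_mindis, ar_aperture = AperMin(lat, lon)
print('min inter-station distance: %g km' % ar_mindis)
print('aperture: %g km' % ar_aperture)
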
def update_distances(self, indices):
    '''Calculate and update distances between events.'''
    if len(indices) != 1 or self.marker_table_view.horizontalHeader()\
            .isSectionHidden(_column_mapping['Dist [km]']):
        return

    if self.last_active_event == self.pile_viewer.get_active_event():
        return
    else:
        self.last_active_event = self.pile_viewer.get_active_event()

    index = indices[0]
    markers = self.pile_viewer.markers
    omarker = markers[index]
    if not isinstance(omarker, EventMarker):
        return

    emarkers = [m for m in markers if isinstance(m, EventMarker)]
    if len(emarkers) < 2:
        return

    lats = num.zeros(len(emarkers))
    lons = num.zeros(len(emarkers))
    for i in xrange(len(emarkers)):
        lats[i] = emarkers[i].get_event().lat
        lons[i] = emarkers[i].get_event().lon

    olats = num.zeros(len(emarkers))
    olons = num.zeros(len(emarkers))
    olats[:] = omarker.get_event().lat
    olons[:] = omarker.get_event().lon

    dists = orthodrome.distance_accurate50m_numpy(lats, lons, olats, olons)
    dists /= 1000.
    self.distances = dict(zip(emarkers, dists))
    self.emit(SIGNAL('dataChanged()'))

    # expensive!
    self.reset()

def get_distance_epi(eventi, eventj):
    '''
    Normalized Euclidean epicentral distance. The normalization assumes
    that the largest considered distance is 1000 km.
    '''
    maxdist_km = 1000.
    a_lats, a_lons, b_lats, b_lons = \
        eventi.lat, eventi.lon, eventj.lat, eventj.lon

    if (a_lats == b_lats) and (a_lons == b_lons):
        d = 0.
    else:
        distance_m = orthodrome.distance_accurate50m_numpy(
            a_lats, a_lons, b_lats, b_lons)
        distance_km = distance_m / 1000.
        d = distance_km / maxdist_km
        if d >= 1.:
            d = 1.
    return d

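# A hedged worked example for get_distance_epi above (not part of the
# original snippet): two hypothetical pyrocko events ~2 degrees of
# latitude (~222 km) apart should yield a normalized distance of about
# 0.22 (222 km / 1000 km).
from pyrocko import model

ev_a = model.Event(lat=45.0, lon=10.0)
ev_b = model.Event(lat=47.0, lon=10.0)

d = get_distance_epi(ev_a, ev_b)
print(float(d))  # expected: ~0.22
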
def distance_to(self, other):
    '''
    Compute surface distance [m] to other location object.
    '''
    if self.same_origin(other):
        if isinstance(other, Location):
            return math.sqrt(
                (self.north_shift - other.north_shift)**2
                + (self.east_shift - other.east_shift)**2)
        else:
            return 0.0
    else:
        slat, slon = self.effective_latlon
        try:
            rlat, rlon = other.effective_latlon
        except AttributeError:
            rlat, rlon = other.lat, other.lon

        return float(orthodrome.distance_accurate50m_numpy(
            slat, slon, rlat, rlon)[0])

def testDistanceArrayC(self):
    ntest = 10000
    locs = self.get_critical_random_locations(ntest)
    orthodrome.distance_accurate50m_numpy(*locs, implementation='c')

def main(args=None):
    if args is None:
        args = sys.argv[1:]

    parser = OptionParser(
        usage=usage,
        description=description)

    parser.add_option(
        '--width', dest='width', type='float', default=20.0,
        metavar='FLOAT', help='set width of output image [cm] (%default)')

    parser.add_option(
        '--height', dest='height', type='float', default=15.0,
        metavar='FLOAT', help='set height of output image [cm] (%default)')

    parser.add_option(
        '--topo-resolution-min', dest='topo_resolution_min', type='float',
        default=40.0, metavar='FLOAT',
        help='minimum resolution of topography [dpi] (%default)')

    parser.add_option(
        '--topo-resolution-max', dest='topo_resolution_max', type='float',
        default=200.0, metavar='FLOAT',
        help='maximum resolution of topography [dpi] (%default)')

    parser.add_option(
        '--no-grid', dest='show_grid', default=True, action='store_false',
        help="don't show grid lines")

    parser.add_option(
        '--no-topo', dest='show_topo', default=True, action='store_false',
        help="don't show topography")

    parser.add_option(
        '--no-cities', dest='show_cities', default=True,
        action='store_false', help="don't show cities")

    parser.add_option(
        '--no-illuminate', dest='illuminate', default=True,
        action='store_false',
        help='deactivate artificial illumination of topography')

    parser.add_option(
        '--illuminate-factor-land', dest='illuminate_factor_land',
        type='float', metavar='FLOAT',
        help='set factor for artificial illumination of land (0.5)')

    parser.add_option(
        '--illuminate-factor-ocean', dest='illuminate_factor_ocean',
        type='float', metavar='FLOAT',
        help='set factor for artificial illumination of ocean (0.25)')

    parser.add_option(
        '--theme', choices=['topo', 'soft'], default='topo',
        help='select color theme, available: topo, soft (topo)')

    parser.add_option(
        '--download-etopo1', dest='download_etopo1', action='store_true',
        help='download full ETOPO1 topography dataset')

    parser.add_option(
        '--download-srtmgl3', dest='download_srtmgl3', action='store_true',
        help='download full SRTMGL3 topography dataset')

    parser.add_option(
        '--make-decimated-topo', dest='make_decimated', action='store_true',
        help='pre-make all decimated topography datasets')

    parser.add_option(
        '--stations', dest='stations_fn', metavar='FILENAME',
        help='load station coordinates from FILENAME')

    parser.add_option(
        '--events', dest='events_fn', metavar='FILENAME',
        help='load event coordinates from FILENAME')

    parser.add_option(
        '--debug', dest='debug', action='store_true', default=False,
        help='print debugging information to stderr')

    (options, args) = parser.parse_args(args)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.download_etopo1:
        import pyrocko.datasets.topo.etopo1
        pyrocko.datasets.topo.etopo1.download()

    if options.download_srtmgl3:
        import pyrocko.datasets.topo.srtmgl3
        pyrocko.datasets.topo.srtmgl3.download()

    if options.make_decimated:
        import pyrocko.datasets.topo
        pyrocko.datasets.topo.make_all_missing_decimated()

    if (options.download_etopo1 or options.download_srtmgl3
            or options.make_decimated) and len(args) == 0:
        sys.exit(0)

    if options.theme == 'soft':
        color_kwargs = {
            'illuminate_factor_land': options.illuminate_factor_land or 0.2,
            'illuminate_factor_ocean':
                options.illuminate_factor_ocean or 0.15,
            'color_wet': (216, 242, 254),
            'color_dry': (238, 236, 230),
            'topo_cpt_wet': 'light_sea_uniform',
            'topo_cpt_dry': 'light_land_uniform'}

    elif options.theme == 'topo':
        color_kwargs = {
            'illuminate_factor_land': options.illuminate_factor_land or 0.5,
            'illuminate_factor_ocean':
                options.illuminate_factor_ocean or 0.25}

    events = []
    if options.events_fn:
        events = model.load_events(options.events_fn)

    stations = []
    if options.stations_fn:
        stations = model.load_stations(options.stations_fn)

    if not (len(args) == 4 or (
            len(args) == 1 and (events or stations))):
        parser.print_help()
        sys.exit(1)

    if len(args) == 4:
        try:
            lat = float(args[0])
            lon = float(args[1])
            radius = float(args[2]) * km
        except Exception:
            parser.print_help()
            sys.exit(1)
    else:
        lats, lons = latlon_arrays(stations + events)
        lat, lon = map(float, od.geographic_midpoint(lats, lons))
        radius = float(
            num.max(od.distance_accurate50m_numpy(lat, lon, lats, lons)))
        radius *= 1.1

    m = automap.Map(
        width=options.width,
        height=options.height,
        lat=lat,
        lon=lon,
        radius=radius,
        topo_resolution_max=options.topo_resolution_max,
        topo_resolution_min=options.topo_resolution_min,
        show_topo=options.show_topo,
        show_grid=options.show_grid,
        illuminate=options.illuminate,
        **color_kwargs)

    logger.debug('map configuration:\n%s' % str(m))

    if options.show_cities:
        m.draw_cities()

    if stations:
        lats = [s.lat for s in stations]
        lons = [s.lon for s in stations]

        m.gmt.psxy(
            in_columns=(lons, lats),
            S='t8p',
            G='black',
            *m.jxyr)

        for s in stations:
            m.add_label(s.lat, s.lon, '%s' % '.'.join(
                x for x in s.nsl() if x))

    if events:
        beachball_symbol = 'mt'
        beachball_size = 20.0

        for ev in events:
            if ev.moment_tensor is None:
                m.gmt.psxy(
                    in_rows=[[ev.lon, ev.lat]],
                    S='c12p',
                    G=gmtpy.color('scarletred2'),
                    W='1p,black',
                    *m.jxyr)
            else:
                devi = ev.moment_tensor.deviatoric()
                mt = devi.m_up_south_east()
                mt = mt / ev.moment_tensor.scalar_moment() \
                    * pmt.magnitude_to_moment(5.0)
                m6 = pmt.to6(mt)
                data = (ev.lon, ev.lat, 10) + tuple(m6) + (1, 0, 0)

                if m.gmt.is_gmt5():
                    kwargs = dict(
                        M=True,
                        S='%s%g' % (
                            beachball_symbol[0],
                            beachball_size / gmtpy.cm))
                else:
                    kwargs = dict(
                        S='%s%g' % (
                            beachball_symbol[0],
                            beachball_size * 2 / gmtpy.cm))

                m.gmt.psmeca(
                    in_rows=[data],
                    G=gmtpy.color('chocolate1'),
                    E='white',
                    W='1p,%s' % gmtpy.color('chocolate3'),
                    *m.jxyr,
                    **kwargs)

    m.save(args[-1])

def optparse(required=(), optional=(), args=sys.argv,
             usage='%prog [options]', descr=None):

    want = required + optional

    parser = OptionParser(
        prog='cake', usage=usage, description=descr.capitalize() + '.',
        add_help_option=False, formatter=util.BetterHelpFormatter())

    parser.add_option(
        '-h', '--help', action='help', help='Show help message and exit.')

    if 'phases' in want:
        group = OptionGroup(parser, 'Phases', '''
Seismic phase arrivals may be either specified as traditional phase names
(e.g. P, S, PP, PcP, ...) or in Cake's own syntax which is more powerful.
Use the --classic option, for traditional phase names. Use the --phase
option if you want to define phases in Cake's syntax.
''')
        group.add_option(
            '--phase', '--phases', dest='phases', action='append',
            default=[], metavar='PHASE1,PHASE2,...',
            help='''Comma separated list of seismic phases in Cake's syntax.

The definition of a seismic propagation path in Cake's phase syntax is a
string consisting of an alternating sequence of "legs" and "knees".

A "leg" represents seismic wave propagation without any conversions,
encountering only super-critical reflections. Legs are denoted by "P",
"p", "S", or "s". The capital letters are used when the take-off of the
"leg" is in downward direction, while the lower case letters indicate a
take-off in upward direction.

A "knee" is an interaction with an interface. It can be a mode
conversion, a reflection, or propagation as a headwave or diffracted
wave.

 * conversion is simply denoted as: "(INTERFACE)" or "DEPTH"
 * upperside reflection: "v(INTERFACE)" or "vDEPTH"
 * underside reflection: "^(INTERFACE)" or "^DEPTH"
 * normal kind headwave or diffracted wave: "v_(INTERFACE)" or "v_DEPTH"

The interface may be given by name or by depth: INTERFACE is the name of
an interface defined in the model, DEPTH is the depth of an interface in
[km] (the interface closest to that depth is chosen). If two legs appear
consecutively without an explicit "knee", surface interaction is assumed.

The preferred standard interface names in cake are "conrad", "moho",
"cmb" (core-mantle boundary), and "cb" (inner core boundary).

The phase definition may end with a backslash "\\", to indicate that the
ray should arrive at the receiver from above instead of from below. It is
possible to restrict the maximum and minimum depth of a "leg" by
appending "<(INTERFACE)" or "<DEPTH" or ">(INTERFACE)" or ">DEPTH" after
the leg character, respectively.

When plotting rays or travel-time curves, the color can be set by
appending "{COLOR}" to the phase definition, where COLOR is the name of a
color or an RGB or RGBA color tuple in the format "R/G/B" or "R/G/B/A",
respectively. The values can be normalized to the range [0, 1] or to
[0, 255]. The latter is only assumed when any of the values given exceeds
1.0.
''')

        group.add_option(
            '--classic', dest='classic_phases', action='append',
            default=[], metavar='PHASE1,PHASE2,...',
            help='''Comma separated list of seismic phases in classic
nomenclature. Run "cake list-phase-map" for a list of available phase
names. When plotting, color can be specified in the same way as in
--phases.''')

        parser.add_option_group(group)

    if 'model' in want:
        group = OptionGroup(parser, 'Model')
        group.add_option(
            '--model', dest='model_filename', metavar='(NAME or FILENAME)',
            help='Use builtin model named NAME or user model from file '
                 'FILENAME. By default, the "ak135-f-continental.m" model '
                 'is used. Run "cake list-models" for a list of builtin '
                 'models.')

        group.add_option(
            '--format', dest='model_format', metavar='FORMAT',
            choices=['nd', 'hyposat'], default='nd',
            help='Set model file format (available: nd, hyposat; default: '
                 'nd).')

        group.add_option(
            '--crust2loc', dest='crust2loc', metavar='LAT,LON',
            help='Set model from CRUST2.0 profile at location (LAT,LON).')

        group.add_option(
            '--crust2profile', dest='crust2profile', metavar='KEY',
            help='Set model from CRUST2.0 profile with given KEY.')

        parser.add_option_group(group)

    if any(x in want for x in (
            'zstart', 'zstop', 'distances', 'sloc', 'rloc')):
        group = OptionGroup(parser, 'Source-receiver geometry')
        if 'zstart' in want:
            group.add_option(
                '--sdepth', dest='sdepth', type='float', default=0.0,
                metavar='FLOAT', help='Source depth [km] (default: 0)')

        if 'zstop' in want:
            group.add_option(
                '--rdepth', dest='rdepth', type='float', default=0.0,
                metavar='FLOAT', help='Receiver depth [km] (default: 0)')

        if 'distances' in want:
            group.add_option(
                '--distances', dest='sdist', metavar='DISTANCES',
                help='Surface distances as "start:stop:n" or '
                     '"dist1,dist2,..." [km]')

            group.add_option(
                '--sloc', dest='sloc', metavar='LAT,LON',
                help='Source location (LAT,LON).')

            group.add_option(
                '--rloc', dest='rloc', metavar='LAT,LON',
                help='Receiver location (LAT,LON).')

        parser.add_option_group(group)

    if 'material' in want:
        group = OptionGroup(
            parser, 'Material',
            'An isotropic elastic material may be specified by giving '
            'a combination of some of the following options. ')
        group.add_option(
            '--vp', dest='vp', default=None, type='float',
            metavar='FLOAT', help='P-wave velocity [km/s]')
        group.add_option(
            '--vs', dest='vs', default=None, type='float',
            metavar='FLOAT', help='S-wave velocity [km/s]')
        group.add_option(
            '--rho', dest='rho', default=None, type='float',
            metavar='FLOAT', help='density [g/cm**3]')
        group.add_option(
            '--qp', dest='qp', default=None, type='float',
            metavar='FLOAT', help='P-wave attenuation Qp (default: 1456)')
        group.add_option(
            '--qs', dest='qs', default=None, type='float',
            metavar='FLOAT', help='S-wave attenuation Qs (default: 600)')
        group.add_option(
            '--poisson', dest='poisson', default=None, type='float',
            metavar='FLOAT', help='Poisson ratio')
        group.add_option(
            '--lambda', dest='lame_lambda', default=None, type='float',
            metavar='FLOAT', help='Lame parameter lambda [GPa]')
        group.add_option(
            '--mu', dest='lame_mu', default=None, type='float',
            metavar='FLOAT', help='Shear modulus [GPa]')
        group.add_option(
            '--qk', dest='qk', default=None, type='float',
            metavar='FLOAT', help='Bulk attenuation Qk')
        group.add_option(
            '--qmu', dest='qmu', default=None, type='float',
            metavar='FLOAT', help='Shear attenuation Qmu')
        parser.add_option_group(group)

    if any(x in want for x in (
            'vred', 'as_degrees', 'accuracy', 'slowness', 'interface',
            'aspect', 'shade_model')):
        group = OptionGroup(parser, 'General')
        if 'vred' in want:
            group.add_option(
                '--vred', dest='vred', type='float', metavar='FLOAT',
                help='Velocity for time reduction in plot [km/s]')

        if 'as_degrees' in want:
            group.add_option(
                '--degrees', dest='as_degrees', action='store_true',
                default=False,
                help='Distances are in [deg] instead of [km], velocities '
                     'in [deg/s] instead of [km/s], slownesses in [s/deg] '
                     'instead of [s/km].')

        if 'accuracy' in want:
            group.add_option(
                '--accuracy', dest='accuracy', type='float',
                metavar='MAXIMUM_RELATIVE_RMS', default=0.002,
                help='Set accuracy for model simplification.')

        if 'slowness' in want:
            group.add_option(
                '--slowness', dest='slowness', type='float',
                metavar='FLOAT', default=0.0,
                help='Select surface slowness [s/km] (default: 0)')

        if 'interface' in want:
            group.add_option(
                '--interface', dest='interface', metavar='(NAME or DEPTH)',
                help='Name or depth [km] of interface to select')

        if 'aspect' in want:
            group.add_option(
                '--aspect', dest='aspect', type='float', metavar='FLOAT',
                help='Aspect ratio for plot')

        if 'shade_model' in want:
            group.add_option(
                '--no-shade-model', dest='shade_model',
                action='store_false', default=True,
                help='Suppress shading of earth model layers')

        parser.add_option_group(group)

    if any(x in want for x in ('output_format',)):
        group = OptionGroup(parser, 'Output')
        if 'output_format' in want:
            group.add_option(
                '--output-format', dest='output_format', metavar='FORMAT',
                default='textual', choices=('textual', 'nd'),
                help='Set model output format (available: textual, nd, '
                     'default: textual)')

        parser.add_option_group(group)

    if usage == 'cake help-options':
        parser.print_help()

    (options, args) = parser.parse_args(args)

    if len(args) != 2:
        parser.error(
            'Cake arguments should look like "--option" or "--option=...".')

    d = {}
    as_degrees = False
    if 'as_degrees' in want:
        as_degrees = options.as_degrees
        d['as_degrees'] = as_degrees

    if 'accuracy' in want:
        d['accuracy'] = options.accuracy

    if 'output_format' in want:
        d['output_format'] = options.output_format

    if 'aspect' in want:
        d['aspect'] = options.aspect

    if 'shade_model' in want:
        d['shade_model'] = options.shade_model

    if 'phases' in want:
        phases = []
        phase_colors = {}
        try:
            for ss in options.phases:
                for s in ss.split(','):
                    s = process_color(s, phase_colors)
                    phases.append(cake.PhaseDef(s))

            for pp in options.classic_phases:
                for p in pp.split(','):
                    p = process_color(p, phase_colors)
                    phases.extend(cake.PhaseDef.classic(p))

        except (cake.PhaseDefParseError, cake.UnknownClassicPhase) as e:
            parser.error(e)

        if not phases and 'phases' in required:
            s = process_color('P', phase_colors)
            phases.append(cake.PhaseDef(s))

        if phases:
            d['phase_colors'] = phase_colors
            d['phases'] = phases

    if 'model' in want:
        if options.model_filename:
            d['model'] = cake.load_model(
                options.model_filename, options.model_format)

        if options.crust2loc or options.crust2profile:
            if options.crust2loc:
                try:
                    args = tuple(
                        [float(x) for x in options.crust2loc.split(',')])
                except Exception:
                    parser.error(
                        'format for --crust2loc option is '
                        '"LATITUDE,LONGITUDE"')
            elif options.crust2profile:
                args = (options.crust2profile.upper(),)
            else:
                assert False

            if 'model' in d:
                d['model'] = d['model'].replaced_crust(args)
            else:
                from pyrocko import crust2x2
                profile = crust2x2.get_profile(*args)
                d['model'] = cake.LayeredModel.from_scanlines(
                    cake.from_crust2x2_profile(profile))

    if 'vred' in want:
        d['vred'] = options.vred
        if d['vred'] is not None:
            if not as_degrees:
                d['vred'] *= r2d * cake.km / cake.earthradius

    if 'distances' in want:
        distances = None
        if options.sdist:
            if options.sdist.find(':') != -1:
                ssn = options.sdist.split(':')
                if len(ssn) != 3:
                    parser.error(
                        'format for distances is '
                        '"min_distance:max_distance:n_distances"')

                distances = num.linspace(*map(float, ssn))
            else:
                distances = num.array(
                    list(map(float, options.sdist.split(','))),
                    dtype=num.float)

            if not as_degrees:
                distances *= r2d * cake.km / cake.earthradius

        if options.sloc and options.rloc:
            try:
                slat, slon = tuple(
                    [float(x) for x in options.sloc.split(',')])
                rlat, rlon = tuple(
                    [float(x) for x in options.rloc.split(',')])
            except Exception:
                parser.error(
                    'format for --sloc and --rloc options is '
                    '"LATITUDE,LONGITUDE"')

            distance_sr = orthodrome.distance_accurate50m_numpy(
                slat, slon, rlat, rlon)
            distance_sr *= r2d / cake.earthradius
            if distances is not None:
                distances = num.concatenate((distances, [distance_sr]))
            else:
                distances = num.array([distance_sr], dtype=num.float)

        if distances is not None:
            d['distances'] = distances
        else:
            if 'distances' not in required:
                d['distances'] = None

    if 'slowness' in want:
        d['slowness'] = options.slowness / cake.d2r
        if not as_degrees:
            d['slowness'] /= cake.km * cake.m2d

    if 'interface' in want:
        if options.interface:
            try:
                d['interface'] = float(options.interface) * cake.km
            except ValueError:
                d['interface'] = options.interface
        else:
            d['interface'] = None

    if 'zstart' in want:
        d['zstart'] = options.sdepth * cake.km

    if 'zstop' in want:
        d['zstop'] = options.rdepth * cake.km

    if 'material' in want:
        md = {}
        userfactor = dict(
            vp=1000., vs=1000., rho=1000., qp=1., qs=1., qmu=1., qk=1.,
            lame_lambda=1.0e9, lame_mu=1.0e9, poisson=1.)

        for k in userfactor.keys():
            if getattr(options, k) is not None:
                md[k] = getattr(options, k) * userfactor[k]

        if not (bool('lame_lambda' in md) == bool('lame_mu' in md)):
            parser.error('lambda and mu must be specified both.')

        if 'lame_lambda' in md and 'lame_mu' in md:
            md['lame'] = md.pop('lame_lambda'), md.pop('lame_mu')

        if md:
            try:
                d['material'] = cake.Material(**md)
            except cake.InvalidArguments as e:
                parser.error(str(e))

    for k in list(d.keys()):
        if k not in want:
            del d[k]

    for k in required:
        if k not in d:
            if k == 'model':
                d['model'] = cake.load_model('ak135-f-continental.m')
            elif k == 'distances':
                d['distances'] = num.linspace(10*cake.km, 100*cake.km, 10) \
                    / cake.earthradius * r2d
            elif k == 'phases':
                d['phases'] = list(map(cake.PhaseDef, 'Pp'))
            else:
                parser.error('missing %s' % k)

    return Anon(d)

def main(): parser = OptionParser(usage=usage, description=description) parser.add_option('--force', dest='force', action='store_true', default=False, help='allow recreation of output <directory>') parser.add_option('--debug', dest='debug', action='store_true', default=False, help='print debugging information to stderr') parser.add_option('--dry-run', dest='dry_run', action='store_true', default=False, help='show available stations/channels and exit ' '(do not download waveforms)') parser.add_option('--continue', dest='continue_', action='store_true', default=False, help='continue download after a accident') parser.add_option('--local-data', dest='local_data', action='append', help='add file/directory with local data') parser.add_option('--local-stations', dest='local_stations', action='append', help='add local stations file') parser.add_option('--selection', dest='selection_file', action='append', help='add local stations file') parser.add_option( '--local-responses-resp', dest='local_responses_resp', action='append', help='add file/directory with local responses in RESP format') parser.add_option('--local-responses-pz', dest='local_responses_pz', action='append', help='add file/directory with local pole-zero responses') parser.add_option( '--local-responses-stationxml', dest='local_responses_stationxml', help='add file with local response information in StationXML format') parser.add_option( '--window', dest='window', default='full', help='set time window to choose [full, p, "<time-start>,<time-end>"' '] (time format is YYYY-MM-DD HH:MM:SS)') parser.add_option( '--out-components', choices=['enu', 'rtu'], dest='out_components', default='rtu', help='set output component orientations to radial-transverse-up [rtu] ' '(default) or east-north-up [enu]') parser.add_option('--out-units', choices=['M', 'M/S', 'M/S**2'], dest='output_units', default='M', help='set output units to displacement "M" (default),' ' velocity "M/S" or acceleration "M/S**2"') parser.add_option( '--padding-factor', type=float, default=3.0, dest='padding_factor', help='extend time window on either side, in multiples of 1/<fmin_hz> ' '(default: 5)') parser.add_option( '--zero-padding', dest='zero_pad', action='store_true', default=False, help='Extend traces by zero-padding if clean restitution requires' 'longer windows') parser.add_option( '--credentials', dest='user_credentials', action='append', default=[], metavar='SITE,USER,PASSWD', help='user credentials for specific site to access restricted data ' '(this option can be repeated)') parser.add_option( '--token', dest='auth_tokens', metavar='SITE,FILENAME', action='append', default=[], help='user authentication token for specific site to access ' 'restricted data (this option can be repeated)') parser.add_option( '--sites', dest='sites', metavar='SITE1,SITE2,...', default='geofon,iris,orfeus', help='sites to query (available: %s, default: "%%default"' % ', '.join(g_sites_available)) parser.add_option( '--band-codes', dest='priority_band_code', metavar='V,L,M,B,H,S,E,...', default='B,H', help='select and prioritize band codes (default: %default)') parser.add_option( '--instrument-codes', dest='priority_instrument_code', metavar='H,L,G,...', default='H,L', help='select and prioritize instrument codes (default: %default)') parser.add_option('--radius-min', dest='radius_min', metavar='VALUE', default=0.0, type=float, help='minimum radius [km]') parser.add_option('--nstations-wanted', dest='nstations_wanted', metavar='N', type=int, help='number of stations to select 
initially') (options, args) = parser.parse_args(sys.argv[1:]) print('Parsed arguments:', args) if len(args) not in (10, 7, 6): parser.print_help() sys.exit(1) if options.debug: util.setup_logging(program_name, 'debug') else: util.setup_logging(program_name, 'info') if options.local_responses_pz and options.local_responses_resp: logger.critical('cannot use local responses in PZ and RESP ' 'format at the same time') sys.exit(1) n_resp_opt = 0 for resp_opt in (options.local_responses_pz, options.local_responses_resp, options.local_responses_stationxml): if resp_opt: n_resp_opt += 1 if n_resp_opt > 1: logger.critical('can only handle local responses from either PZ or ' 'RESP or StationXML. Cannot yet merge different ' 'response formats.') sys.exit(1) if options.local_responses_resp and not options.local_stations: logger.critical('--local-responses-resp can only be used ' 'when --stations is also given.') sys.exit(1) try: ename = '' magnitude = None mt = None if len(args) == 10: time = util.str_to_time(args[1] + ' ' + args[2]) lat = float(args[3]) lon = float(args[4]) depth = float(args[5]) * km iarg = 6 elif len(args) == 7: if args[2].find(':') == -1: sname_or_date = None lat = float(args[1]) lon = float(args[2]) event = None time = None else: sname_or_date = args[1] + ' ' + args[2] iarg = 3 elif len(args) == 6: sname_or_date = args[1] iarg = 2 if len(args) in (7, 6) and sname_or_date is not None: events = get_events_by_name_or_date([sname_or_date], catalog=geofon) if len(events) == 0: logger.critical('no event found') sys.exit(1) elif len(events) > 1: logger.critical('more than one event found') sys.exit(1) event = events[0] time = event.time lat = event.lat lon = event.lon depth = event.depth ename = event.name magnitude = event.magnitude mt = event.moment_tensor radius = float(args[iarg]) * km fmin = float(args[iarg + 1]) sample_rate = float(args[iarg + 2]) eventname = args[iarg + 3] cwd = str(sys.argv[1]) event_dir = op.join(cwd, 'data', 'events', eventname) output_dir = op.join(event_dir, 'waveforms') except: raise parser.print_help() sys.exit(1) if options.force and op.isdir(event_dir): if not options.continue_: shutil.rmtree(event_dir) if op.exists(event_dir) and not options.continue_: logger.critical( 'directory "%s" exists. Delete it first or use the --force option' % event_dir) sys.exit(1) util.ensuredir(output_dir) if time is not None: event = model.Event(time=time, lat=lat, lon=lon, depth=depth, name=ename, magnitude=magnitude, moment_tensor=mt) if options.window == 'full': if event is None: logger.critical('need event for --window=full') sys.exit(1) low_velocity = 1500. 
timewindow = VelocityWindow(low_velocity, tpad=options.padding_factor / fmin) tmin, tmax = timewindow(time, radius, depth) elif options.window == 'p': if event is None: logger.critical('need event for --window=p') sys.exit(1) phases = list(map(cake.PhaseDef, 'P p'.split())) emod = cake.load_model() tpad = options.padding_factor / fmin timewindow = PhaseWindow(emod, phases, -tpad, tpad) arrivaltimes = [] for dist in num.linspace(0, radius, 20): try: arrivaltimes.extend(timewindow(time, dist, depth)) except NoArrival: pass if not arrivaltimes: logger.error('required phase arrival not found') sys.exit(1) tmin = min(arrivaltimes) tmax = max(arrivaltimes) else: try: stmin, stmax = options.window.split(',') tmin = util.str_to_time(stmin.strip()) tmax = util.str_to_time(stmax.strip()) timewindow = FixedWindow(tmin, tmax) except ValueError: logger.critical('invalid argument to --window: "%s"' % options.window) sys.exit(1) if event is not None: event.name = eventname tfade = tfade_factor / fmin tpad = tfade tmin -= tpad tmax += tpad tinc = None priority_band_code = options.priority_band_code.split(',') for s in priority_band_code: if len(s) != 1: logger.critical('invalid band code: %s' % s) priority_instrument_code = options.priority_instrument_code.split(',') for s in priority_instrument_code: if len(s) != 1: logger.critical('invalid instrument code: %s' % s) station_query_conf = dict(latitude=lat, longitude=lon, minradius=options.radius_min * km * cake.m2d, maxradius=radius * cake.m2d, channel=','.join('%s??' % s for s in priority_band_code)) target_sample_rate = sample_rate fmax = target_sample_rate # target_sample_rate = None # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S'] priority_units = ['M/S', 'M', 'M/S**2'] # output_units = 'M' sites = [x.strip() for x in options.sites.split(',') if x.strip()] for site in sites: if site not in g_sites_available: logger.critical('unknown FDSN site: %s' % site) sys.exit(1) for s in options.user_credentials: try: site, user, passwd = s.split(',') g_user_credentials[site] = user, passwd except ValueError: logger.critical('invalid format for user credentials: "%s"' % s) sys.exit(1) for s in options.auth_tokens: try: site, token_filename = s.split(',') with open(token_filename, 'r') as f: g_auth_tokens[site] = f.read() except (ValueError, OSError, IOError): logger.critical('cannot get token from file: %s' % token_filename) sys.exit(1) fn_template0 = \ 'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed' fn_template_raw = op.join(output_dir, 'raw', fn_template0) fn_stations_raw = op.join(output_dir, 'stations.raw.txt') fn_template_rest = op.join(output_dir, 'rest', fn_template0) fn_commandline = op.join(output_dir, 'beatdown.command') ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax) # chapter 1: download sxs = [] for site in sites: try: extra_args = { 'iris': dict(matchtimeseries=True), }.get(site, {}) extra_args.update(station_query_conf) if site == 'geonet': extra_args.update(starttime=tmin, endtime=tmax) else: extra_args.update(startbefore=tmax, endafter=tmin, includerestricted=(site in g_user_credentials or site in g_auth_tokens)) logger.info('downloading channel information (%s)' % site) sx = fdsn.station(site=site, format='text', level='channel', **extra_args) except fdsn.EmptyResult: logger.error('No stations matching given criteria. 
(%s)' % site) sx = None if sx is not None: sxs.append(sx) if all(sx is None for sx in sxs) and not options.local_data: sys.exit(1) nsl_to_sites = defaultdict(list) nsl_to_station = {} if options.selection_file: logger.info('using stations from stations file!') stations = [] for fn in options.selection_file: stations.extend(model.load_stations(fn)) nsls_selected = set(s.nsl() for s in stations) else: nsls_selected = None for sx, site in zip(sxs, sites): site_stations = sx.get_pyrocko_stations() for s in site_stations: nsl = s.nsl() nsl_to_sites[nsl].append(site) if nsl not in nsl_to_station: if nsls_selected: if nsl in nsls_selected: nsl_to_station[nsl] = s else: nsl_to_station[ nsl] = s # using first site with this station logger.info('number of stations found: %i' % len(nsl_to_station)) # station weeding if options.nstations_wanted: nsls_selected = None stations_all = [ nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys()) ] for s in stations_all: s.set_event_relative_data(event) stations_selected = weeding.weed_stations(stations_all, options.nstations_wanted)[0] nsls_selected = set(s.nsl() for s in stations_selected) logger.info('number of stations selected: %i' % len(nsls_selected)) if tinc is None: tinc = 3600. have_data = set() if options.continue_: fns = glob.glob(fn_template_raw % starfill()) p = pile.make_pile(fns) else: fns = [] have_data_site = {} could_have_data_site = {} for site in sites: have_data_site[site] = set() could_have_data_site[site] = set() available_through = defaultdict(set) it = 0 nt = int(math.ceil((tmax - tmin) / tinc)) for it in range(nt): tmin_win = tmin + it * tinc tmax_win = min(tmin + (it + 1) * tinc, tmax) logger.info('time window %i/%i (%s - %s)' % (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win))) have_data_this_window = set() if options.continue_: trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False) for tr in trs_avail: have_data_this_window.add(tr.nslc_id) for site, sx in zip(sites, sxs): if sx is None: continue selection = [] channels = sx.choose_channels( target_sample_rate=target_sample_rate, priority_band_code=priority_band_code, priority_units=priority_units, priority_instrument_code=priority_instrument_code, timespan=(tmin_win, tmax_win)) for nslc in sorted(channels.keys()): if nsls_selected is not None and nslc[:3] not in nsls_selected: continue could_have_data_site[site].add(nslc) if nslc not in have_data_this_window: channel = channels[nslc] if event: lat_, lon_ = event.lat, event.lon else: lat_, lon_ = lat, lon try: dist = orthodrome.distance_accurate50m_numpy( lat_, lon_, channel.latitude.value, channel.longitude.value) except: dist = orthodrome.distance_accurate50m_numpy( lat_, lon_, channel.latitude, channel.longitude) if event: depth_ = event.depth time_ = event.time else: depth_ = None time_ = None tmin_, tmax_ = timewindow(time_, dist, depth_) tmin_this = tmin_ - tpad tmax_this = float(tmax_ + tpad) tmin_req = max(tmin_win, tmin_this) tmax_req = min(tmax_win, tmax_this) if channel.sample_rate: try: deltat = 1.0 / int(channel.sample_rate.value) except: deltat = 1.0 / int(channel.sample_rate) else: deltat = 1.0 if tmin_req < tmax_req: logger.debug('deltat %f' % deltat) # extend time window by some samples because otherwise # sometimes gaps are produced # apparently the WS are only sensitive to full seconds # round to avoid gaps, increase safetiy window selection.append(nslc + (math.floor(tmin_req - deltat * 20.0), math.ceil(tmax_req + deltat * 20.0))) if options.dry_run: for (net, sta, loc, cha, tmin, tmax) in 
selection: available_through[net, sta, loc, cha].add(site) else: neach = 100 i = 0 nbatches = ((len(selection) - 1) // neach) + 1 while i < len(selection): selection_now = selection[i:i + neach] f = tempfile.NamedTemporaryFile() try: sbatch = '' if nbatches > 1: sbatch = ' (batch %i/%i)' % ( (i // neach) + 1, nbatches) logger.info('downloading data (%s)%s' % (site, sbatch)) data = fdsn.dataselect(site=site, selection=selection_now, **get_user_credentials(site)) while True: buf = data.read(1024) if not buf: break f.write(buf) f.flush() trs = io.load(f.name) for tr in trs: tr.fix_deltat_rounding_errors() logger.debug('cutting window: %f - %f' % (tmin_win, tmax_win)) logger.debug( 'available window: %f - %f, nsamples: %g' % (tr.tmin, tr.tmax, tr.ydata.size)) try: logger.debug('tmin before snap %f' % tr.tmin) tr.snap(interpolate=True) logger.debug('tmin after snap %f' % tr.tmin) tr.chop(tmin_win, tmax_win, snap=(math.floor, math.ceil), include_last=True) logger.debug( 'cut window: %f - %f, nsamles: %g' % (tr.tmin, tr.tmax, tr.ydata.size)) have_data.add(tr.nslc_id) have_data_site[site].add(tr.nslc_id) except trace.NoData: pass fns2 = io.save(trs, fn_template_raw) for fn in fns2: if fn in fns: logger.warn('overwriting file %s', fn) fns.extend(fns2) except fdsn.EmptyResult: pass except HTTPError: logger.warn('an error occurred while downloading data ' 'for channels \n %s' % '\n '.join('.'.join(x[:4]) for x in selection_now)) f.close() i += neach if options.dry_run: nslcs = sorted(available_through.keys()) all_channels = defaultdict(set) all_stations = defaultdict(set) def plural_s(x): return '' if x == 1 else 's' for nslc in nslcs: sites = tuple(sorted(available_through[nslc])) logger.info('selected: %s.%s.%s.%s from site%s %s' % (nslc + (plural_s(len(sites)), '+'.join(sites)))) all_channels[sites].add(nslc) all_stations[sites].add(nslc[:3]) nchannels_all = 0 nstations_all = 0 for sites in sorted(all_channels.keys(), key=lambda sites: (-len(sites), sites)): nchannels = len(all_channels[sites]) nstations = len(all_stations[sites]) nchannels_all += nchannels nstations_all += nstations logger.info('selected (%s): %i channel%s (%i station%s)' % ('+'.join(sites), nchannels, plural_s(nchannels), nstations, plural_s(nstations))) logger.info('selected total: %i channel%s (%i station%s)' % (nchannels_all, plural_s(nchannels_all), nstations_all, plural_s(nstations_all))) logger.info('dry run done.') sys.exit(0) for nslc in have_data: # if we are in continue mode, we have to guess where the data came from if not any(nslc in have_data_site[site] for site in sites): for site in sites: if nslc in could_have_data_site[site]: have_data_site[site].add(nslc) sxs = {} for site in sites: selection = [] for nslc in sorted(have_data_site[site]): selection.append(nslc + (tmin - tpad, tmax + tpad)) if selection: logger.info('downloading response information (%s)' % site) sxs[site] = fdsn.station(site=site, level='response', selection=selection) sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' % site)) # chapter 1.5: inject local data if options.local_data: have_data_site['local'] = set() plocal = pile.make_pile(options.local_data, fileformat='detect') logger.info( 'Importing local data from %s between %s (%f) and %s (%f)' % (options.local_data, util.time_to_str(tmin), tmin, util.time_to_str(tmax), tmax)) for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id, tmin=tmin, tmax=tmax, tinc=tinc): for tr in traces: if tr.nslc_id not in have_data: fns.extend(io.save(traces, fn_template_raw)) 
have_data_site['local'].add(tr.nslc_id) have_data.add(tr.nslc_id) sites.append('local') if options.local_responses_pz: sxs['local'] = epz.make_stationxml( epz.iload(options.local_responses_pz)) if options.local_responses_resp: local_stations = [] for fn in options.local_stations: local_stations.extend(model.load_stations(fn)) sxs['local'] = resp.make_stationxml( local_stations, resp.iload(options.local_responses_resp)) if options.local_responses_stationxml: sxs['local'] = stationxml.load_xml( filename=options.local_responses_stationxml) # chapter 1.6: dump raw data stations file nsl_to_station = {} for site in sites: if site in sxs: stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax)) for s in stations: nsl = s.nsl() if nsl not in nsl_to_station: nsl_to_station[nsl] = s stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())] util.ensuredirs(fn_stations_raw) model.dump_stations(stations, fn_stations_raw) dump_commandline(sys.argv, fn_commandline) # chapter 2: restitution if not fns: logger.error('no data available') sys.exit(1) p = pile.make_pile(fns, show_progress=False) p.get_deltatmin() otinc = None if otinc is None: otinc = nice_seconds_floor(p.get_deltatmin() * 500000.) otinc = 3600. otmin = math.floor(p.tmin / otinc) * otinc otmax = math.ceil(p.tmax / otinc) * otinc otpad = tpad * 2 fns = [] rest_traces_b = [] win_b = None for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id, tmin=otmin, tmax=otmax, tinc=otinc, tpad=otpad): rest_traces_a = [] win_a = None for tr in traces_a: win_a = tr.wmin, tr.wmax if win_b and win_b[0] >= win_a[0]: fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest)) rest_traces_b = [] win_b = None response = None failure = [] for site in sites: try: if site not in sxs: continue logger.debug('Getting response for %s' % tr.__str__()) response = sxs[site].get_pyrocko_response( tr.nslc_id, timespan=(tr.tmin, tr.tmax), fake_input_units=options.output_units) break except stationxml.NoResponseInformation: failure.append('%s: no response information' % site) except stationxml.MultipleResponseInformation: failure.append('%s: multiple response information' % site) if response is None: failure = ', '.join(failure) else: failure = '' try: if tr.tmin > tmin and options.zero_pad: logger.warning( 'Trace too short for clean restitution in ' 'desired frequency band -> zero-padding!') tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat') rest_tr = tr.transfer(tfade, ftap, response, invert=True) rest_traces_a.append(rest_tr) except (trace.TraceTooShort, trace.NoData): failure = 'trace too short' if failure: logger.warn('failed to restitute trace %s.%s.%s.%s (%s)' % (tr.nslc_id + (failure, ))) if rest_traces_b: rest_traces = trace.degapper(rest_traces_b + rest_traces_a, deoverlap='crossfade_cos') fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest)) rest_traces_a = [] if win_a: for tr in rest_traces: try: rest_traces_a.append( tr.chop(win_a[0], win_a[1] + otpad, inplace=False)) except trace.NoData: pass rest_traces_b = rest_traces_a win_b = win_a fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest)) # chapter 3: rotated restituted traces for inspection if not event: sys.exit(0) fn_template1 = \ 'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s' fn_waveforms = op.join(output_dir, 'prepared', fn_template1) fn_stations = op.join(output_dir, 'stations.prepared.txt') fn_event = op.join(event_dir, 'event.txt') fn_event_yaml = op.join(event_dir, 'event.yaml') nsl_to_station = {} for site in sites: if site in sxs: 
stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax)) for s in stations: nsl = s.nsl() if nsl not in nsl_to_station: nsl_to_station[nsl] = s p = pile.make_pile(fns, show_progress=False) deltat = None if sample_rate is not None: deltat = 1.0 / sample_rate traces_beat = [] used_stations = [] for nsl, s in nsl_to_station.items(): s.set_event_relative_data(event) traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl) if options.out_components == 'rtu': pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z')) elif options.out_components == 'enu': pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z')) else: assert False for (proj, in_channels, out_channels) in pios: proc = trace.project(traces, proj, in_channels, out_channels) for tr in proc: tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr) traces_beat.append(tr_beat) for ch in out_channels: if ch.name == tr.channel: s.add_channel(ch) if proc: io.save(proc, fn_waveforms) used_stations.append(s) stations = list(used_stations) util.ensuredirs(fn_stations) model.dump_stations(stations, fn_stations) model.dump_events([event], fn_event) from pyrocko.guts import dump dump([event], filename=fn_event_yaml) utility.dump_objects(op.join(cwd, 'seismic_data.pkl'), outlist=[stations, traces_beat]) logger.info('prepared waveforms from %i stations' % len(stations))
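# Aside: a self-contained sketch of the ceil-division batching idiom used
# in the download loop above: split a selection list into chunks of at
# most `neach` entries. Names and numbers here are illustrative only.
def batching_sketch():
    selection = list(range(250))
    neach = 100
    nbatches = ((len(selection) - 1) // neach) + 1  # ceil(250/100) == 3
    batches = [selection[i:i + neach]
               for i in range(0, len(selection), neach)]
    assert len(batches) == nbatches
    return batches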
import numpy as num
from pyrocko import orthodrome

ncoords = 1000

# First set of coordinates; note that latitudes span [-90, 90] and
# longitudes [-180, 180], not the other way around.
lats_a = num.random.uniform(-90., 90., ncoords)
lons_a = num.random.uniform(-180., 180., ncoords)

# Second set of coordinates
lats_b = num.random.uniform(-90., 90., ncoords)
lons_b = num.random.uniform(-180., 180., ncoords)

distances = orthodrome.distance_accurate50m_numpy(
    lats_a, lons_a, lats_b, lons_b)
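# Aside: distance_accurate50m_numpy() operates elementwise, so distances
# from a single reference point to many points can be computed by
# repeating the reference coordinates (scalar arguments also broadcast,
# as other snippets in this collection show). Coordinates are made up;
# the function returns distances in meters.
import numpy as num
from pyrocko import orthodrome

ref_lat, ref_lon = 52.5, 13.4  # hypothetical reference location
lats = num.random.uniform(-90., 90., 100)
lons = num.random.uniform(-180., 180., 100)

dists_m = orthodrome.distance_accurate50m_numpy(
    num.full(lats.size, ref_lat), num.full(lons.size, ref_lon),
    lats, lons)
print(dists_m / 1000.)  # [km]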
def call(self): if not self.viewer_connected: self.get_viewer().about_to_close.connect( self.file_serving_worker.stop) self.viewer_connected = True try: from OpenGL import GL # noqa except ImportError: logger.warn( 'Could not find package OpenGL, ' 'if the map does not work try installing OpenGL\n' 'e.g. sudo pip install PyOpenGL') self.cleanup() try: viewer = self.get_viewer() cli_mode = False except NoViewerSet: viewer = None cli_mode = True if not cli_mode: if self.only_active: _, active_stations = \ self.get_active_event_and_stations() else: active_stations = viewer.stations.values() elif cli_mode: active_stations = self.stations station_list = [] if active_stations: for stat in active_stations: is_blacklisted = util.match_nslc(viewer.blacklist, stat.nsl()) if (viewer and not is_blacklisted) or cli_mode: xml_station_marker = XMLStationMarker( nsl='.'.join(stat.nsl()), longitude=float(stat.lon), latitude=float(stat.lat), active='yes') station_list.append(xml_station_marker) active_station_list = StationMarkerList(stations=station_list) if self.only_active: markers = [viewer.get_active_event_marker()] else: if cli_mode: markers = self.markers else: markers = self.get_selected_markers() if len(markers) == 0: tmin, tmax = self.get_selected_time_range(fallback=True) markers = [m for m in viewer.get_markers() if isinstance(m, gui_util.EventMarker) and m.tmin >= tmin and m.tmax <= tmax] ev_marker_list = [] for m in markers: if not isinstance(m, gui_util.EventMarker): continue xmleventmarker = convert_event_marker(m) if xmleventmarker is None: continue ev_marker_list.append(xmleventmarker) event_list = EventMarkerList(events=ev_marker_list) event_station_list = MarkerLists( station_marker_list=active_station_list, event_marker_list=event_list) event_station_list.validate() if self.map_kind != 'GMT': tempdir = self.marker_tempdir if self.map_kind == 'Google Maps': map_fn = 'map_googlemaps.html' elif self.map_kind == 'OpenStreetMap': map_fn = 'map_osm.html' url = 'http://localhost:' + str(self.port) + '/%s' % map_fn files = ['loadxmldoc.js', 'map_util.js', 'plates.kml', map_fn] snuffling_dir = op.dirname(op.abspath(__file__)) for entry in files: shutil.copy(os.path.join(snuffling_dir, entry), os.path.join(tempdir, entry)) logger.debug('copied data to %s' % tempdir) markers_fn = os.path.join(self.marker_tempdir, 'markers.xml') self.data_proxy.content_to_serve.emit(self.port) dump_xml(event_station_list, filename=markers_fn) if self.open_external: qg.QDesktopServices.openUrl(qc.QUrl(url)) else: global g_counter g_counter += 1 self.web_frame( url, name='Map %i (%s)' % (g_counter, self.map_kind)) else: lats_all = [] lons_all = [] slats = [] slons = [] slabels = [] for s in active_stations: slats.append(s.lat) slons.append(s.lon) slabels.append('.'.join(s.nsl())) elats = [] elons = [] elats = [] elons = [] psmeca_input = [] markers = self.get_selected_markers() for m in markers: if isinstance(m, gui_util.EventMarker): e = m.get_event() elats.append(e.lat) elons.append(e.lon) if e.moment_tensor is not None: mt = e.moment_tensor.m6() psmeca_input.append( (e.lon, e.lat, e.depth/1000., mt[0], mt[1], mt[2], mt[3], mt[4], mt[5], 1., e.lon, e.lat, e.name)) else: if e.magnitude is None: moment = -1. 
else: moment = moment_tensor.magnitude_to_moment( e.magnitude) psmeca_input.append( (e.lon, e.lat, e.depth/1000., moment/3., moment/3., moment/3., 0., 0., 0., 1., e.lon, e.lat, e.name)) lats_all.extend(elats) lons_all.extend(elons) lats_all.extend(slats) lons_all.extend(slons) lats_all = num.array(lats_all) lons_all = num.array(lons_all) if len(lats_all) == 0: return center_lat, center_lon = ortho.geographic_midpoint( lats_all, lons_all) ntotal = len(lats_all) clats = num.ones(ntotal) * center_lat clons = num.ones(ntotal) * center_lon dists = ortho.distance_accurate50m_numpy( clats, clons, lats_all, lons_all) maxd = num.max(dists) or 0. m = Map( lat=center_lat, lon=center_lon, radius=max(10000., maxd) * 1.1, width=35, height=25, show_grid=True, show_topo=True, color_dry=(238, 236, 230), topo_cpt_wet='light_sea_uniform', topo_cpt_dry='light_land_uniform', illuminate=True, illuminate_factor_ocean=0.15, show_rivers=False, show_plates=False) m.gmt.psxy(in_columns=(slons, slats), S='t15p', G='black', *m.jxyr) for i in range(len(active_stations)): m.add_label(slats[i], slons[i], slabels[i]) m.gmt.psmeca( in_rows=psmeca_input, S='m1.0', G='red', C='5p,0/0/0', *m.jxyr) tmpdir = self.tempdir() self.outfn = os.path.join(tmpdir, '%i.png' % self.figcount) m.save(self.outfn) f = self.pixmap_frame(self.outfn) # noqa
def update_distances_and_angles(self, indices=None, want_angles=False, want_distances=False): '''Calculate and update distances and kagan angles between events. :param indices: list of lists of indices (optional) Ideally, indices are consecutive for best performance.''' want_angles = want_angles or \ not self.marker_table_view.isColumnHidden( _column_mapping['Kagan Angle [deg]']) want_distances = want_distances or \ not self.marker_table_view.isColumnHidden( _column_mapping['Dist [km]']) if not (want_distances or want_angles): return indices = indices or [[]] indices = [i for ii in indices for i in ii] if len(indices) != 1: return if self.last_active_event == self.pile_viewer.get_active_event(): return else: self.last_active_event = self.pile_viewer.get_active_event() markers = self.pile_viewer.markers nmarkers = len(markers) omarker = markers[indices[0]] if not isinstance(omarker, EventMarker): return else: oevent = omarker.get_event() emarkers = [m for m in markers if isinstance(m, EventMarker)] if len(emarkers) < 2: return else: events = [em.get_event() for em in emarkers] nevents = len(events) if want_distances: lats = num.zeros(nevents) lons = num.zeros(nevents) for i in range(nevents): lats[i] = events[i].lat lons[i] = events[i].lon olats = num.zeros(nevents) olons = num.zeros(nevents) olats[:] = oevent.lat olons[:] = oevent.lon dists = orthodrome.distance_accurate50m_numpy( lats, lons, olats, olons) dists /= 1000. dists = [round(x, 1) for x in dists] self.distances = dict(list(zip(emarkers, dists))) if want_angles: if oevent.moment_tensor: for em in emarkers: e = em.get_event() if e.moment_tensor: a = kagan_angle(oevent.moment_tensor, e.moment_tensor) self.kagan_angles[em] = a else: self.kagan_angles = {} istart = self.index(0, _column_mapping['Dist [km]']) istop = self.index(nmarkers-1, _column_mapping['Kagan Angle [deg]']) self.dataChanged.emit( istart, istop)
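# Aside: a minimal sketch of the Kagan angle computation used above,
# assuming kagan_angle() is the one from pyrocko.moment_tensor (the
# handler imports it under the bare name). Mechanism values are made up.
from pyrocko import moment_tensor as pmt

mt_a = pmt.MomentTensor(strike=30., dip=80., rake=10.)
mt_b = pmt.MomentTensor(strike=45., dip=70., rake=20.)
print('Kagan angle: %.1f deg' % pmt.kagan_angle(mt_a, mt_b))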
def main(args=None): if args is None: args = sys.argv[1:] parser = OptionParser( usage=usage, description=description) parser.add_option( '--width', dest='width', type='float', default=20.0, metavar='FLOAT', help='set width of output image [cm] (%default)') parser.add_option( '--height', dest='height', type='float', default=15.0, metavar='FLOAT', help='set height of output image [cm] (%default)') parser.add_option( '--topo-resolution-min', dest='topo_resolution_min', type='float', default=40.0, metavar='FLOAT', help='minimum resolution of topography [dpi] (%default)') parser.add_option( '--topo-resolution-max', dest='topo_resolution_max', type='float', default=200.0, metavar='FLOAT', help='maximum resolution of topography [dpi] (%default)') parser.add_option( '--no-grid', dest='show_grid', default=True, action='store_false', help='don\'t show grid lines') parser.add_option( '--no-topo', dest='show_topo', default=True, action='store_false', help='don\'t show topography') parser.add_option( '--no-cities', dest='show_cities', default=True, action='store_false', help='don\'t show cities') parser.add_option( '--no-illuminate', dest='illuminate', default=True, action='store_false', help='deactivate artificial illumination of topography') parser.add_option( '--illuminate-factor-land', dest='illuminate_factor_land', type='float', metavar='FLOAT', help='set factor for artificial illumination of land (0.5)') parser.add_option( '--illuminate-factor-ocean', dest='illuminate_factor_ocean', type='float', metavar='FLOAT', help='set factor for artificial illumination of ocean (0.25)') parser.add_option( '--theme', choices=['topo', 'soft'], default='topo', help='select color theme, available: topo, soft (topo)"') parser.add_option( '--download-etopo1', dest='download_etopo1', action='store_true', help='download full ETOPO1 topography dataset') parser.add_option( '--download-srtmgl3', dest='download_srtmgl3', action='store_true', help='download full SRTMGL3 topography dataset') parser.add_option( '--make-decimated-topo', dest='make_decimated', action='store_true', help='pre-make all decimated topography datasets') parser.add_option( '--stations', dest='stations_fn', metavar='FILENAME', help='load station coordinates from FILENAME') parser.add_option( '--events', dest='events_fn', metavar='FILENAME', help='load event coordinates from FILENAME') parser.add_option( '--debug', dest='debug', action='store_true', default=False, help='print debugging information to stderr') (options, args) = parser.parse_args(args) if options.debug: util.setup_logging(program_name, 'debug') else: util.setup_logging(program_name, 'info') if options.download_etopo1: import pyrocko.datasets.topo.etopo1 pyrocko.datasets.topo.etopo1.download() if options.download_srtmgl3: import pyrocko.datasets.topo.srtmgl3 pyrocko.datasets.topo.srtmgl3.download() if options.make_decimated: import pyrocko.datasets.topo pyrocko.datasets.topo.make_all_missing_decimated() if (options.download_etopo1 or options.download_srtmgl3 or options.make_decimated) and len(args) == 0: sys.exit(0) if options.theme == 'soft': color_kwargs = { 'illuminate_factor_land': options.illuminate_factor_land or 0.2, 'illuminate_factor_ocean': options.illuminate_factor_ocean or 0.15, 'color_wet': (216, 242, 254), 'color_dry': (238, 236, 230), 'topo_cpt_wet': 'light_sea_uniform', 'topo_cpt_dry': 'light_land_uniform'} elif options.theme == 'topo': color_kwargs = { 'illuminate_factor_land': options.illuminate_factor_land or 0.5, 'illuminate_factor_ocean': 
options.illuminate_factor_ocean or 0.25} events = [] if options.events_fn: events = model.load_events(options.events_fn) stations = [] if options.stations_fn: stations = model.load_stations(options.stations_fn) if not (len(args) == 4 or ( len(args) == 1 and (events or stations))): parser.print_help() sys.exit(1) if len(args) == 4: try: lat = float(args[0]) lon = float(args[1]) radius = float(args[2])*km except Exception: parser.print_help() sys.exit(1) else: lats, lons = latlon_arrays(stations+events) lat, lon = map(float, od.geographic_midpoint(lats, lons)) radius = float( num.max(od.distance_accurate50m_numpy(lat, lon, lats, lons))) radius *= 1.1 m = automap.Map( width=options.width, height=options.height, lat=lat, lon=lon, radius=radius, topo_resolution_max=options.topo_resolution_max, topo_resolution_min=options.topo_resolution_min, show_topo=options.show_topo, show_grid=options.show_grid, illuminate=options.illuminate, **color_kwargs) logger.debug('map configuration:\n%s' % str(m)) if options.show_cities: m.draw_cities() if stations: lats = [s.lat for s in stations] lons = [s.lon for s in stations] m.gmt.psxy( in_columns=(lons, lats), S='t8p', G='black', *m.jxyr) for s in stations: m.add_label(s.lat, s.lon, '%s' % '.'.join( x for x in s.nsl() if x)) if events: beachball_symbol = 'mt' beachball_size = 20.0 for ev in events: if ev.moment_tensor is None: m.gmt.psxy( in_rows=[[ev.lon, ev.lat]], S='c12p', G=gmtpy.color('scarletred2'), W='1p,black', *m.jxyr) else: devi = ev.moment_tensor.deviatoric() mt = devi.m_up_south_east() mt = mt / ev.moment_tensor.scalar_moment() \ * pmt.magnitude_to_moment(5.0) m6 = pmt.to6(mt) data = (ev.lon, ev.lat, 10) + tuple(m6) + (1, 0, 0) if m.gmt.is_gmt5(): kwargs = dict( M=True, S='%s%g' % ( beachball_symbol[0], beachball_size / gmtpy.cm)) else: kwargs = dict( S='%s%g' % ( beachball_symbol[0], beachball_size*2 / gmtpy.cm)) m.gmt.psmeca( in_rows=[data], G=gmtpy.color('chocolate1'), E='white', W='1p,%s' % gmtpy.color('chocolate3'), *m.jxyr, **kwargs) m.save(args[-1])
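# Aside: the auto-framing idiom used above, reduced to its core: center
# the map on the geographic midpoint of all points and size it by the
# largest midpoint-to-point distance plus a 10 % margin. Coordinates
# below are made up.
import numpy as num
from pyrocko import orthodrome as od

lats = num.array([46.1, 47.3, 45.8])
lons = num.array([11.2, 12.0, 10.5])

lat0, lon0 = od.geographic_midpoint(lats, lons)
radius = float(num.max(
    od.distance_accurate50m_numpy(lat0, lon0, lats, lons)))
radius *= 1.1  # [m], 10 % margin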
def main(): parser = OptionParser(usage=usage, description=description) parser.add_option( "--force", dest="force", action="store_true", default=False, help="allow recreation of output <directory>", ) parser.add_option( "--debug", dest="debug", action="store_true", default=False, help="print debugging information to stderr", ) parser.add_option( "--dry-run", dest="dry_run", action="store_true", default=False, help="show available stations/channels and exit " "(do not download waveforms)", ) parser.add_option( "--continue", dest="continue_", action="store_true", default=False, help="continue download after a accident", ) parser.add_option( "--local-data", dest="local_data", action="append", help="add file/directory with local data", ) parser.add_option( "--local-stations", dest="local_stations", action="append", help="add local stations file", ) parser.add_option( "--local-responses-resp", dest="local_responses_resp", action="append", help="add file/directory with local responses in RESP format", ) parser.add_option( "--local-responses-pz", dest="local_responses_pz", action="append", help="add file/directory with local pole-zero responses", ) parser.add_option( "--local-responses-stationxml", dest="local_responses_stationxml", help="add file with local response information in StationXML format", ) parser.add_option( "--window", dest="window", default="full", help='set time window to choose [full, p, "<time-start>,<time-end>"' "] (time format is YYYY-MM-DD HH:MM:SS)", ) parser.add_option( "--out-components", choices=["enu", "rtu"], dest="out_components", default="rtu", help="set output component orientations to radial-transverse-up [rtu] " "(default) or east-north-up [enu]", ) parser.add_option( "--padding-factor", type=float, default=3.0, dest="padding_factor", help="extend time window on either side, in multiples of 1/<fmin_hz> " "(default: 5)", ) parser.add_option( "--credentials", dest="user_credentials", action="append", default=[], metavar="SITE,USER,PASSWD", help="user credentials for specific site to access restricted data " "(this option can be repeated)", ) parser.add_option( "--token", dest="auth_tokens", metavar="SITE,FILENAME", action="append", default=[], help="user authentication token for specific site to access " "restricted data (this option can be repeated)", ) parser.add_option( "--sites", dest="sites", metavar="SITE1,SITE2,...", # default='bgr', default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080", help='sites to query (available: %s, default: "%%default"' % ", ".join(g_sites_available), ) parser.add_option( "--band-codes", dest="priority_band_code", metavar="V,L,M,B,H,S,E,...", default="V,L,M,B,H,E", help="select and prioritize band codes (default: %default)", ) parser.add_option( "--instrument-codes", dest="priority_instrument_code", metavar="H,L,G,...", default="H,L,O,", help="select and prioritize instrument codes (default: %default)", ) parser.add_option( "--radius-min", dest="radius_min", metavar="VALUE", default=0.0, type=float, help="minimum radius [km]", ) parser.add_option( "--tinc", dest="tinc", metavar="VALUE", default=3600.0 * 12.0, type=float, help="length of seperate saved files in s", ) parser.add_option( "--nstations-wanted", dest="nstations_wanted", metavar="N", type=int, help="number of stations to select initially", ) (options, args) = parser.parse_args(sys.argv[1:]) if len(args) not in (9, 6, 5): parser.print_help() sys.exit(1) if options.debug: util.setup_logging(program_name, "debug") else: util.setup_logging(program_name, "info") if 
options.local_responses_pz and options.local_responses_resp: logger.critical("cannot use local responses in PZ and RESP " "format at the same time") sys.exit(1) n_resp_opt = 0 for resp_opt in ( options.local_responses_pz, options.local_responses_resp, options.local_responses_stationxml, ): if resp_opt: n_resp_opt += 1 if n_resp_opt > 1: logger.critical("can only handle local responses from either PZ or " "RESP or StationXML. Cannot yet merge different " "response formats.") sys.exit(1) if options.local_responses_resp and not options.local_stations: logger.critical("--local-responses-resp can only be used " "when --stations is also given.") sys.exit(1) try: ename = "" magnitude = None mt = None if len(args) == 9: time = util.str_to_time(args[0] + " " + args[1]) lat = float(args[2]) lon = float(args[3]) depth = float(args[4]) * km iarg = 5 elif len(args) == 6: if args[1].find(":") == -1: sname_or_date = None lat = float(args[0]) lon = float(args[1]) event = None time = None else: sname_or_date = args[0] + " " + args[1] iarg = 2 elif len(args) == 5: sname_or_date = args[0] iarg = 1 if len(args) in (6, 5) and sname_or_date is not None: events = get_events_by_name_or_date([sname_or_date], catalog=geofon) if len(events) == 0: logger.critical("no event found") sys.exit(1) elif len(events) > 1: logger.critical("more than one event found") sys.exit(1) event = events[0] time = event.time lat = event.lat lon = event.lon depth = event.depth ename = event.name magnitude = event.magnitude mt = event.moment_tensor radius = float(args[iarg]) * km fmin = float(args[iarg + 1]) sample_rate = float(args[iarg + 2]) eventname = args[iarg + 3] event_dir = op.join("data", "events", eventname) output_dir = op.join(event_dir, "waveforms") except: raise parser.print_help() sys.exit(1) if options.force and op.isdir(event_dir): if not options.continue_: shutil.rmtree(event_dir) if op.exists(event_dir) and not options.continue_: logger.critical( 'directory "%s" exists. 
Delete it first or use the --force option' % event_dir) sys.exit(1) util.ensuredir(output_dir) if time is not None: event = model.Event( time=time, lat=lat, lon=lon, depth=depth, name=ename, magnitude=magnitude, moment_tensor=mt, ) if options.window == "full": if event is None: logger.critical("need event for --window=full") sys.exit(1) low_velocity = 1500.0 timewindow = VelocityWindow(low_velocity, tpad=options.padding_factor / fmin) tmin, tmax = timewindow(time, radius, depth) elif options.window == "p": if event is None: logger.critical("need event for --window=p") sys.exit(1) phases = list(map(cake.PhaseDef, "P p".split())) emod = cake.load_model() tpad = options.padding_factor / fmin timewindow = PhaseWindow(emod, phases, -tpad, tpad) arrivaltimes = [] for dist in num.linspace(0, radius, 20): try: arrivaltimes.extend(timewindow(time, dist, depth)) except NoArrival: pass if not arrivaltimes: logger.error("required phase arrival not found") sys.exit(1) tmin = min(arrivaltimes) tmax = max(arrivaltimes) else: try: stmin, stmax = options.window.split(",") tmin = util.str_to_time(stmin.strip()) tmax = util.str_to_time(stmax.strip()) timewindow = FixedWindow(tmin, tmax) except ValueError: logger.critical('invalid argument to --window: "%s"' % options.window) sys.exit(1) if event is not None: event.name = eventname tlen = tmax - tmin tfade = tfade_factor / fmin tpad = tfade tmin -= tpad tmax += tpad priority_band_code = options.priority_band_code.split(",") for s in priority_band_code: if len(s) != 1: logger.critical("invalid band code: %s" % s) priority_instrument_code = options.priority_instrument_code.split(",") for s in priority_instrument_code: if len(s) != 1: logger.critical("invalid instrument code: %s" % s) station_query_conf = dict( latitude=lat, longitude=lon, minradius=options.radius_min * km * cake.m2d, maxradius=radius * cake.m2d, channel=",".join("?%s?" 
% s for s in priority_band_code), ) target_sample_rate = sample_rate fmax = target_sample_rate # target_sample_rate = None # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S'] priority_units = ["M/S", "M", "M/S**2"] output_units = "M" sites = [x.strip() for x in options.sites.split(",") if x.strip()] tinc = options.tinc # for site in sites: # if site not in g_sites_available: # logger.critical('unknown FDSN site: %s' % site) # sys.exit(1) for s in options.user_credentials: try: site, user, passwd = s.split(",") g_user_credentials[site] = user, passwd except ValueError: logger.critical('invalid format for user credentials: "%s"' % s) sys.exit(1) for s in options.auth_tokens: try: site, token_filename = s.split(",") with open(token_filename, "r") as f: g_auth_tokens[site] = f.read() except (ValueError, OSError, IOError): logger.critical("cannot get token from file: %s" % token_filename) sys.exit(1) fn_template0 = ( "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed") fn_template_raw = op.join(output_dir, "raw", fn_template0) fn_template_raw_folder = op.join(output_dir, "raw/", "traces.mseed") fn_stations_raw = op.join(output_dir, "stations.raw.txt") fn_template_rest = op.join(output_dir, "rest", fn_template0) fn_commandline = op.join(output_dir, "seigerdown.command") ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax) # chapter 1: download sxs = [] for site in sites: try: extra_args = { "iris": dict(matchtimeseries=True), }.get(site, {}) extra_args.update(station_query_conf) if site == "geonet": extra_args.update(starttime=tmin, endtime=tmax) else: extra_args.update( startbefore=tmax, endafter=tmin, includerestricted=(site in g_user_credentials or site in g_auth_tokens), ) logger.info("downloading channel information (%s)" % site) sx = fdsn.station(site=site, format="text", level="channel", **extra_args) except fdsn.EmptyResult: logger.error("No stations matching given criteria. 
(%s)" % site) sx = None if sx is not None: sxs.append(sx) if all(sx is None for sx in sxs) and not options.local_data: sys.exit(1) nsl_to_sites = defaultdict(list) nsl_to_station = {} for sx, site in zip(sxs, sites): site_stations = sx.get_pyrocko_stations() for s in site_stations: nsl = s.nsl() nsl_to_sites[nsl].append(site) if nsl not in nsl_to_station: nsl_to_station[nsl] = s # using first site with this station logger.info("number of stations found: %i" % len(nsl_to_station)) # station weeding nsls_selected = None if options.nstations_wanted: stations_all = [ nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys()) ] for s in stations_all: s.set_event_relative_data(event) stations_selected = weeding.weed_stations(stations_all, options.nstations_wanted)[0] nsls_selected = set(s.nsl() for s in stations_selected) logger.info("number of stations selected: %i" % len(nsls_selected)) have_data = set() if options.continue_: fns = glob.glob(fn_template_raw % starfill()) p = pile.make_pile(fns) else: fns = [] have_data_site = {} could_have_data_site = {} for site in sites: have_data_site[site] = set() could_have_data_site[site] = set() available_through = defaultdict(set) it = 0 nt = int(math.ceil((tmax - tmin) / tinc)) for it in range(nt): tmin_win = tmin + it * tinc tmax_win = min(tmin + (it + 1) * tinc, tmax) logger.info("time window %i/%i (%s - %s)" % (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win))) have_data_this_window = set() if options.continue_: trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False) for tr in trs_avail: have_data_this_window.add(tr.nslc_id) for site, sx in zip(sites, sxs): if sx is None: continue selection = [] channels = sx.choose_channels( target_sample_rate=target_sample_rate, priority_band_code=priority_band_code, priority_units=priority_units, priority_instrument_code=priority_instrument_code, timespan=(tmin_win, tmax_win), ) for nslc in sorted(channels.keys()): if nsls_selected is not None and nslc[:3] not in nsls_selected: continue could_have_data_site[site].add(nslc) if nslc not in have_data_this_window: channel = channels[nslc] if event: lat_, lon_ = event.lat, event.lon else: lat_, lon_ = lat, lon dist = orthodrome.distance_accurate50m_numpy( lat_, lon_, channel.latitude.value, channel.longitude.value) if event: depth_ = event.depth time_ = event.time else: depth_ = None time_ = None tmin_, tmax_ = timewindow(time_, dist, depth_) tmin_this = tmin_ - tpad tmax_this = tmax_ + tpad tmin_req = max(tmin_win, tmin_this) tmax_req = min(tmax_win, tmax_this) if channel.sample_rate: deltat = 1.0 / channel.sample_rate.value else: deltat = 1.0 if tmin_req < tmax_req: # extend time window by some samples because otherwise # sometimes gaps are produced selection.append(nslc + (tmin_req - deltat * 10.0, tmax_req + deltat * 10.0)) if options.dry_run: for (net, sta, loc, cha, tmin, tmax) in selection: available_through[net, sta, loc, cha].add(site) else: neach = 100 i = 0 nbatches = ((len(selection) - 1) // neach) + 1 while i < len(selection): selection_now = selection[i:i + neach] f = tempfile.NamedTemporaryFile() try: sbatch = "" if nbatches > 1: sbatch = " (batch %i/%i)" % ( (i // neach) + 1, nbatches) logger.info("downloading data (%s)%s" % (site, sbatch)) data = fdsn.dataselect(site=site, selection=selection_now, **get_user_credentials(site)) while True: buf = data.read(1024) if not buf: break f.write(buf) f.flush() trs = io.load(f.name) for tr in trs: if tr.station == "7869": tr.station = "MOER" tr.network = "LE" tr.location = "" try: 
tr.chop(tmin_win, tmax_win) have_data.add(tr.nslc_id) have_data_site[site].add(tr.nslc_id) except trace.NoData: pass fns2 = io.save(trs, fn_template_raw) io.save(trs, fn_template_raw_folder) for fn in fns2: if fn in fns: logger.warn("overwriting file %s", fn) fns.extend(fns2) except fdsn.EmptyResult: pass except HTTPError: logger.warn("an error occurred while downloading data " "for channels \n %s" % "\n ".join(".".join(x[:4]) for x in selection_now)) f.close() i += neach if options.dry_run: nslcs = sorted(available_through.keys()) all_channels = defaultdict(set) all_stations = defaultdict(set) def plural_s(x): return "" if x == 1 else "s" for nslc in nslcs: sites = tuple(sorted(available_through[nslc])) logger.info("selected: %s.%s.%s.%s from site%s %s" % (nslc + (plural_s(len(sites)), "+".join(sites)))) all_channels[sites].add(nslc) all_stations[sites].add(nslc[:3]) nchannels_all = 0 nstations_all = 0 for sites in sorted(all_channels.keys(), key=lambda sites: (-len(sites), sites)): nchannels = len(all_channels[sites]) nstations = len(all_stations[sites]) nchannels_all += nchannels nstations_all += nstations logger.info("selected (%s): %i channel%s (%i station%s)" % ( "+".join(sites), nchannels, plural_s(nchannels), nstations, plural_s(nstations), )) logger.info("selected total: %i channel%s (%i station%s)" % ( nchannels_all, plural_s(nchannels_all), nstations_all, plural_s(nstations_all), )) logger.info("dry run done.") sys.exit(0) for nslc in have_data: # if we are in continue mode, we have to guess where the data came from if not any(nslc in have_data_site[site] for site in sites): for site in sites: if nslc in could_have_data_site[site]: have_data_site[site].add(nslc) sxs = {} for site in sites: selection = [] for nslc in sorted(have_data_site[site]): selection.append(nslc + (tmin - tpad, tmax + tpad)) if selection: logger.info("downloading response information (%s)" % site) sxs[site] = fdsn.station(site=site, level="response", selection=selection) sited = site if site == "http://192.168.11.220:8080": sited = "bgr_internal" elif site == "http://ws.gpi.kit.edu": sited = "kit" if site == "http://188.246.25.142:8080": sited = "moer" sxs[site].dump_xml(filename=op.join(output_dir, "stations.%s.xml" % sited)) # chapter 1.5: inject local data if options.local_data: have_data_site["local"] = set() plocal = pile.make_pile(options.local_data, fileformat="detect") for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id, tmin=tmin, tmax=tmax, tinc=tinc): for tr in traces: if tr.station == "7869": tr.station = "MOER" tr.network = "LE" tr.location = "" if tr.nslc_id not in have_data: fns.extend(io.save(traces, fn_template_raw)) have_data_site["local"].add(tr.nslc_id) have_data.add(tr.nslc_id) sites.append("local") if options.local_responses_pz: sxs["local"] = epz.make_stationxml( epz.iload(options.local_responses_pz)) if options.local_responses_resp: local_stations = [] for fn in options.local_stations: local_stations.extend(model.load_stations(fn)) sxs["local"] = resp.make_stationxml( local_stations, resp.iload(options.local_responses_resp)) if options.local_responses_stationxml: sxs["local"] = stationxml.load_xml( filename=options.local_responses_stationxml) # chapter 1.6: dump raw data stations file nsl_to_station = {} for site in sites: if site in sxs: stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax)) for s in stations: nsl = s.nsl() if nsl not in nsl_to_station: nsl_to_station[nsl] = s stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())] 
util.ensuredirs(fn_stations_raw) model.dump_stations(stations, fn_stations_raw) dump_commandline(sys.argv, fn_commandline) # chapter 2: restitution if not fns: logger.error("no data available") sys.exit(1) p = pile.make_pile(fns, show_progress=False) p.get_deltatmin() otinc = None if otinc is None: otinc = nice_seconds_floor(p.get_deltatmin() * 500000.0) otinc = 3600.0 otmin = math.floor(p.tmin / otinc) * otinc otmax = math.ceil(p.tmax / otinc) * otinc otpad = tpad * 2 fns = [] rest_traces_b = [] win_b = None for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id, tmin=otmin, tmax=otmax, tinc=otinc, tpad=otpad): rest_traces_a = [] win_a = None for tr in traces_a: if tr.station == "7869": tr.station = "MOER" tr.network = "LE" tr.location = "" win_a = tr.wmin, tr.wmax if win_b and win_b[0] >= win_a[0]: fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest)) rest_traces_b = [] win_b = None response = None failure = [] for site in sites: try: if site not in sxs: continue response = sxs[site].get_pyrocko_response( tr.nslc_id, timespan=(tr.tmin, tr.tmax), fake_input_units=output_units, ) break except stationxml.NoResponseInformation: failure.append("%s: no response information" % site) except stationxml.MultipleResponseInformation: failure.append("%s: multiple response information" % site) if response is None: failure = ", ".join(failure) else: failure = "" try: rest_tr = tr.transfer(tfade, ftap, response, invert=True) rest_traces_a.append(rest_tr) except (trace.TraceTooShort, trace.NoData): failure = "trace too short" if failure: logger.warn("failed to restitute trace %s.%s.%s.%s (%s)" % (tr.nslc_id + (failure, ))) if rest_traces_b: rest_traces = trace.degapper(rest_traces_b + rest_traces_a, deoverlap="crossfade_cos") fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest)) rest_traces_a = [] if win_a: for tr in rest_traces: if tr.station == "7869": tr.station = "MOER" tr.network = "LE" tr.location = "" try: rest_traces_a.append( tr.chop(win_a[0], win_a[1] + otpad, inplace=False)) except trace.NoData: pass rest_traces_b = rest_traces_a win_b = win_a fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest)) # chapter 3: rotated restituted traces for inspection if not event: sys.exit(0) fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s" fn_waveforms = op.join(output_dir, "prepared", fn_template1) fn_stations = op.join(output_dir, "stations.prepared.txt") fn_event = op.join(event_dir, "event.txt") nsl_to_station = {} for site in sites: if site in sxs: stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax)) for s in stations: nsl = s.nsl() if nsl not in nsl_to_station: nsl_to_station[nsl] = s p = pile.make_pile(fns, show_progress=False) deltat = None if sample_rate is not None: deltat = 1.0 / sample_rate used_stations = [] for nsl, s in nsl_to_station.items(): s.set_event_relative_data(event) traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl) keep = [] for tr in traces: if deltat is not None: try: tr.downsample_to(deltat, snap=True, allow_upsample_max=5) keep.append(tr) except util.UnavailableDecimation as e: logger.warn("Cannot downsample %s.%s.%s.%s: %s" % (tr.nslc_id + (e, ))) continue if options.out_components == "rtu": pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z")) elif options.out_components == "enu": pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z")) else: assert False for (proj, in_channels, out_channels) in pios: proc = trace.project(traces, proj, in_channels, out_channels) for tr in proc: for 
ch in out_channels: if ch.name == tr.channel: s.add_channel(ch) if proc: io.save(proc, fn_waveforms) used_stations.append(s) stations = list(used_stations) util.ensuredirs(fn_stations) model.dump_stations(stations, fn_stations) model.dump_events([event], fn_event) logger.info("prepared waveforms from %i stations" % len(stations))
def collectSemb(SembList, Config, Origin, Folder, ntimes, arrays, switch,
                array_centers):
    '''
    Collect semblance matrices from all processes and write them to file
    for each timestep.
    '''
    Logfile.add('start collect in collectSemb')

    cfg = ConfigObj(dict=Config)
    origin = ConfigObj(dict=Origin)

    dimX = cfg.dimX()
    dimY = cfg.dimY()
    if switch == 0:
        winlen = cfg.winlen()
        step = cfg.step()
    if switch == 1:
        winlen = cfg.winlen_f2()
        step = cfg.step_f2()

    latv = []
    lonv = []

    gridspacing = cfg.Float('gridspacing')
    migpoints = dimX * dimY
    o_lat = origin.lat()
    o_lon = origin.lon()

    # build the flat lists of grid node coordinates, latitude varying
    # slowest; remember the southwest corner for the file headers
    for i in range(dimX):
        oLatul = o_lat - ((dimX - 1) / 2) * gridspacing + i * gridspacing
        if i == 0:
            Latul = oLatul
        for j in range(dimY):
            oLonul = o_lon - ((dimY - 1) / 2) * gridspacing + j * gridspacing
            if j == 0:
                Lonul = oLonul
            latv.append(oLatul)
            lonv.append(oLonul)

    origin = DataTypes.dictToLocation(Origin)

    # stack the semblance contributions of all arrays multiplicatively
    tmp = 1
    for a in SembList:
        tmp *= a

    sembmaxvaluev = num.ndarray(ntimes, dtype=float)
    sembmaxlatv = num.ndarray(ntimes, dtype=float)
    sembmaxlonv = num.ndarray(ntimes, dtype=float)

    rc = UTCDateTime(Origin['time'])
    rcs = '%s-%s-%s_%02d:%02d:%02d' % (rc.day, rc.month, rc.year,
                                       rc.hour, rc.minute, rc.second)
    d = rc.timestamp
    usedarrays = arrays

    folder = Folder['semb']
    fobjsembmax = open(os.path.join(folder, 'sembmax_%s.txt' % switch), 'w')

    norm = num.max(num.max(tmp, axis=1))

    for a, i in enumerate(tmp):
        logger.info('timestep %d' % a)
        fobj = open(os.path.join(folder, '%s-%s_%03d.ASC'
                                 % (switch, Origin['depth'], a)), 'w')

        fobj.write('# %s , %s\n' % (d, rcs))
        fobj.write('# step %ds| ntimes %d| winlen: %ds\n'
                   % (step, ntimes, winlen))
        fobj.write('# \n')
        fobj.write('# southwestlat: %.2f dlat: %f nlat: %f \n'
                   % (Latul, gridspacing, dimX))
        fobj.write('# southwestlon: %.2f dlon: %f nlon: %f \n'
                   % (Lonul, gridspacing, dimY))
        fobj.write('# ddepth: 0 ndepth: 1 \n')

        sembmax = 0
        sembmaxX = 0
        sembmaxY = 0
        uncert = num.std(i)  # maybe not std?

        for j in range(migpoints):
            x = latv[j]
            y = lonv[j]
            semb = i[j] / norm
            fobj.write('%.2f %.2f %.20f\n' % (x, y, semb))

            if semb > sembmax:
                # track maximum and its position on the semblance grid
                # for the given time step
                sembmax = semb
                sembmaxX = x
                sembmaxY = y

        delta = orthodrome.distance_accurate50m_numpy(
            sembmaxX, sembmaxY, origin.lat, origin.lon)
        azi = toAzimuth(float(Origin['lat']), float(Origin['lon']),
                        float(sembmaxX), float(sembmaxY))

        sembmaxvaluev[a] = sembmax
        sembmaxlatv[a] = sembmaxX
        sembmaxlonv[a] = sembmaxY

        fobjsembmax.write('%d %.3f %.3f %.30f %.30f %d %03f %f %03f\n'
                          % (a * step, sembmaxX, sembmaxY, sembmax, uncert,
                             usedarrays, delta, float(azi), delta * 119.19))

        fobj.close()

    fobjsembmax.close()

    trigger.writeSembMaxValue(sembmaxvaluev, sembmaxlatv, sembmaxlonv,
                              ntimes, Config, Folder)

    inspect_semb = cfg.Bool('inspect_semb')
    if inspect_semb is True:
        trigger.semblancestalta(sembmaxvaluev, sembmaxlatv, sembmaxlonv)

    return sembmaxvaluev
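# Aside: a hypothetical vectorized equivalent of the nested grid-building
# loop in collectSemb() above, for illustration only; latitude varies
# slowest, matching the append order of latv/lonv.
import numpy as num

dimX, dimY, gridspacing = 40, 40, 0.025  # made-up grid parameters
o_lat, o_lon = 46.0, 11.0

lat_row = o_lat + (num.arange(dimX) - (dimX - 1) / 2.) * gridspacing
lon_row = o_lon + (num.arange(dimY) - (dimY - 1) / 2.) * gridspacing
latv = num.repeat(lat_row, dimY)
lonv = num.tile(lon_row, dimX)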
def call(self): try: global vtk import vtk from vtk.util import numpy_support import sys sys.path[0:0] = [self.module_dir()] from grid_topo import setup_vtk_map_actor sys.path[0:1] = [] except ImportError as _import_error: self.fail('\nImportError:\n%s' % _import_error) vtk = None self.cleanup() stations = [] events = [] cone_actors = [] sphere_actors = [] if self.want_stations: stations = self.get_stations() if self.want_events: markers = self.get_selected_event_markers() if len(markers) == 0: tmin, tmax = self.get_selected_time_range(fallback=True) markers = filter(lambda x: tmin < x.tmin < tmax, self.get_event_markers()) events = [m.get_event() for m in markers] all_lats = [] all_lons = [] for s in stations: all_lats.append(s.lat) all_lons.append(s.lon) for e in events: all_lats.append(e.lat) all_lons.append(e.lon) center_lat, center_lon = ortho.geographic_midpoint( num.array(all_lats), num.array(all_lons)) center_lats = num.array([center_lat]*len(all_lats)) center_lons = num.array([center_lon]*len(all_lons)) distances = ortho.distance_accurate50m_numpy( num.array(all_lats), num.array(all_lons), center_lats, center_lons) distance_max = num.max(distances) size = distance_max / 50. if len(events) != 0: ns, es, depths = self.locations_to_ned(events) times = [e.time for e in events] adata = num.array((es, ns, -depths)) adata = adata.flatten(order='F') data = numpy_support.numpy_to_vtk( adata, deep=True, array_type=vtk.VTK_FLOAT) data.SetNumberOfComponents(3) sphere_actors = self.events_to_vtksphere_actors(data, times, size=size/2.) if len(stations) != 0: ns, es, depths = self.locations_to_ned(stations, has_elevation=True) adata = num.array((es, ns, -depths)).flatten(order='F') data = numpy_support.numpy_to_vtk( adata, deep=True, array_type=vtk.VTK_FLOAT) data.SetNumberOfComponents(3) cone_actors = self.stations_to_vtkcone_actors(data, size=size) if self.want_topo: distance_max += self.margin_radius * 1000 self.topo_actor = setup_vtk_map_actor( center_lat, center_lon, distance_max, super_elevation=self.z_scale, decimation=int(self.z_decimation or 1), smoothing=self.smoothing) self.frame = self.vtk_frame() for actor in cone_actors: actor.GetProperty().SetColor(0., 0., 1.) self.frame.add_actor(actor) for actor in sphere_actors: self.frame.add_actor(actor) if self.topo_actor: self.frame.add_actor(self.topo_actor) self.frame.renderer.SetBackground(0.01, 0.05, 0.1) self.frame.init()
derec_home = os.environ['DEREC_HOME']
store_id = 'doctar_mainland_20Hz'

e = LocalEngine(store_superdirs=[pjoin(derec_home, 'fomostos')])
store = e.get_store(store_id)
m = store.config.earthmodel_1d

event = model.Event(load=pjoin(
    derec_home, 'mseeds/doctar/doctar_2011-11-01/'
    'doctar_2011-11-01_quakefile.dat'))

statfn = pjoin(derec_home, 'mseeds/doctar/doctar_2011-11-01/stations.txt')
stations = model.load_stations(statfn)

# map epicentral distance [m] -> station name
stations_distance = {}
for s in stations:
    d = orthodrome.distance_accurate50m_numpy(
        s.lat, s.lon, event.lat, event.lon)
    stations_distance[float(d)] = s.station

phases = ['p']

colormap = {}
num_colors = 12
colors = list(num.linspace(0, 254, num_colors))
cmap = plt.get_cmap('hsv')

distances = num.linspace(1, 58, 120) * km * cake.m2d

t_diff = collections.defaultdict(list)
t_diff_d = collections.defaultdict(list)
toplot = []
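# Aside: floats make brittle dict keys; sorting (distance, station) pairs
# achieves the same lookup-by-distance more robustly. A sketch reusing
# the names from the snippet above.
stations_by_distance = sorted(
    (float(orthodrome.distance_accurate50m_numpy(
        s.lat, s.lon, event.lat, event.lon)), s.station)
    for s in stations)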
def filter_oob(sources, targets, config):
    '''Filter sources that would be out of bounds of the GF database.'''
    nsources, ntargets = len(sources), len(targets)
    slats, slons = num.empty(nsources), num.empty(nsources)
    sdepth = num.empty(nsources)
    tlats, tlons = num.empty(ntargets), num.empty(ntargets)
    televations = num.empty(ntargets)

    for i_s, s in enumerate(sources):
        slats[i_s], slons[i_s], sdepth[i_s] = *s.effective_latlon, s.depth

    for i_t, t in enumerate(targets):
        tlats[i_t], tlons[i_t] = t.effective_latlon
        televations[i_t] = t.elevation

    dists = num.empty((ntargets, nsources))
    depths = num.empty((ntargets, nsources))
    for i in range(ntargets):
        dists[i] = orthodrome.distance_accurate50m_numpy(
            slats, slons, tlats[i], tlons[i])
        depths[i] = sdepth + televations[i]

    # flag a source if, for any target, its distance or depth comes within
    # 100 m of the store limits or exceeds them
    i_dist = num.where(
        num.any(num.logical_or(dists > config.distance_max - 100,
                               dists < config.distance_min + 100),
                axis=0))[0]

    i_depth = num.where(
        num.any(num.logical_or(depths > config.source_depth_max - 100,
                               depths < config.source_depth_min + 100),
                axis=0))[0]

    i_filter = num.union1d(i_depth, i_dist)
    logger.warn('Removing %i / %i sources which would be out of bounds'
                % (len(i_filter), nsources))

    # pop in reverse order so earlier indices stay valid
    for i in i_filter[::-1]:
        sources.pop(i)

    return sources
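# Aside: the same bounds test as in filter_oob(), written as a single
# boolean mask over a (ntargets, nsources) distance matrix. Values are
# made up; num.flatnonzero() gives the indices of sources to keep.
import numpy as num

dists = num.random.uniform(0., 300e3, (5, 100))
distance_min, distance_max = 10e3, 250e3

ok = num.all((dists >= distance_min + 100.) &
             (dists <= distance_max - 100.), axis=0)
i_keep = num.flatnonzero(ok)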
def testDistanceArrayC(self):
    ntest = 10000
    locs = self.get_critical_random_locations(ntest)
    orthodrome.distance_accurate50m_numpy(*locs, implementation='c')
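# Aside: a sketch for timing the two implementations against each other,
# assuming the implementation keyword also accepts 'python' alongside the
# 'c' used in the test above.
import timeit
import numpy as num
from pyrocko import orthodrome

n = 10000
lats_a = num.random.uniform(-90., 90., n)
lons_a = num.random.uniform(-180., 180., n)
lats_b = num.random.uniform(-90., 90., n)
lons_b = num.random.uniform(-180., 180., n)

for impl in ('python', 'c'):
    t = timeit.timeit(
        lambda: orthodrome.distance_accurate50m_numpy(
            lats_a, lons_a, lats_b, lons_b, implementation=impl),
        number=10)
    print('%-6s %.3f s' % (impl, t))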
def prep_orient(datapath, st, loc, catalog, dir_ro, v_rayleigh, bp,
                dt_start, dt_stop, ccmin=0.80,
                plot_heatmap=False, plot_distr=False, debug=False):
    """
    Perform orientation analysis using Rayleigh waves, main function.

    time wdw: 20 s before 4.0 km/s arrival and 600 s afterwards
    (Stachnik et al. 2012)
    - compute radial component for backazimuth values of 0 to 360 deg
    - for each, c-c of hilbert(R) with Z comp.
    - call plotting functions and/or write results to file

    :param datapath: path to rrd data
    :param st: current station (pyrocko station object)
    :param catalog: list of pyrocko events used for analysis
    :param dir_ro: output directory
    :param plot_heatmap: bool, optional
    :param plot_distr: bool, optional
    """
    logs = logging.getLogger('prep_orient')
    st_data_pile = pile.make_pile(
        datapath, regex='%s_%s_' % (st.network, st.station),
        show_progress=False)
    n_ev = len(catalog)

    if st_data_pile.tmin is not None and st_data_pile.tmax is not None:

        # calculate distance between all events and the current station
        r_arr_by_ev = num.empty(n_ev)
        ev_lats = num.asarray([ev.lat for ev in catalog])
        ev_lons = num.asarray([ev.lon for ev in catalog])
        dists = distance_accurate50m_numpy(a_lats=ev_lats, a_lons=ev_lons,
                                           b_lats=st.lat, b_lons=st.lon,
                                           implementation='c')
        r_arr_by_ev = (dists / 1000.) / v_rayleigh
        cc_i_ev_vs_rota = num.empty((n_ev, 360))
        rot_angles = range(-180, 180, 1)

        for i_ev, ev in enumerate(catalog):
            arrT = ev.time + r_arr_by_ev[i_ev]

            start_twd1 = ev.time
            end_twd1 = arrT + 1800

            trZ = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'Z')
            trR = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'R')
            trT = get_tr_by_cha(st_data_pile, start_twd1, end_twd1, loc, 'T')

            start_twd2 = ev.time + r_arr_by_ev[i_ev] - dt_start
            end_twd2 = arrT + dt_stop

            if len(trZ) == 1 and len(trR) == 1 and len(trT) == 1:
                trZ = trZ[0]
                trR = trR[0]
                trT = trT[0]

                # debugging - window selection:
                if debug is True:
                    trace.snuffle([trZ, trR, trT], markers=[
                        pm.Marker(nslc_ids=[trZ.nslc_id, trR.nslc_id,
                                            trT.nslc_id],
                                  tmin=start_twd2, tmax=end_twd2),
                        pm.Marker(nslc_ids=[trZ.nslc_id, trR.nslc_id,
                                            trT.nslc_id],
                                  tmin=arrT, tmax=arrT + 3)])

            else:
                cc_i_ev_vs_rota[i_ev, :] = num.nan
                continue

            try:
                trZ.bandpass(bp[0], bp[1], bp[2])
                trZ.chop(tmin=start_twd2, tmax=end_twd2)
            except trace.NoData:
                logs.warning('no data %s %s %s' % (trZ, trR, trT))
                continue

            for i_r, r in enumerate(rot_angles):
                print('rotation angle [deg]: %5d' % r, end='\r')
                rot_2, rot_3 = trace.rotate(traces=[trR, trT], azimuth=r,
                                            in_channels=['R', 'T'],
                                            out_channels=['2', '3'])

                # 90 deg phase shift of the rotated radial component via
                # the imaginary part of the analytic signal
                rot_2_y = rot_2.ydata
                rot_2_hilb = num.imag(trace.hilbert(rot_2_y, len(rot_2_y)))
                rot_2_hilb_tr = trace.Trace(deltat=rot_2.deltat,
                                            ydata=rot_2_hilb,
                                            tmin=rot_2.tmin)
                rot_2_hilb_tr.bandpass(bp[0], bp[1], bp[2])
                rot_2_hilb_tr.chop(tmin=start_twd2, tmax=end_twd2)

                # normalize traces
                trZ.ydata /= abs(max(trZ.ydata))
                rot_2_hilb_tr.ydata /= abs(max(rot_2_hilb_tr.ydata))

                c = trace.correlate(trZ, rot_2_hilb_tr, mode='valid',
                                    normalization='normal')

                t, coef = c.max()
                t2, coef2 = max_or_min(c)
                cc_i_ev_vs_rota[i_ev, i_r] = coef

        if plot_heatmap is True:
            fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(8, 2))

            cax = ax.imshow(cc_i_ev_vs_rota, interpolation='nearest',
                            vmin=-1.0, vmax=1.0, aspect='auto',
                            extent=[-180, 180, n_ev, 0], cmap='binary')
            ax.set_ylabel('i_ev')
            ax.set_xlabel('Correction angle (deg)')
            ax.set_title('%s %s' % (st.network, st.station))
            cbar = fig.colorbar(cax, ticks=[0, 0.5, 1.0],
                                orientation='horizontal',
                                fraction=0.05, pad=0.5)
            cbar.ax.set_xticklabels(['0', '0.5', '1.0'])
            plt.tight_layout()
            fig.savefig(os.path.join(
                dir_ro, '%s_%s_%s_rot_cc_heatmap.png'
                % (st.network, st.station, loc)))
            plt.close()

        if plot_distr is True:
            plot_ccdistr_each_event(cc_i_ev_vs_rota, catalog,
                                    rot_angles, st, loc, dir_ro)

        median_a, mean_a, std_a, switched, n_ev =\
            get_m_angle_switched(cc_i_ev_vs_rota, catalog, st, ccmin)

        dict_ev_angle = get_m_angle_all(cc_i_ev_vs_rota, catalog, st, ccmin)

        return median_a, mean_a, std_a, switched, dict_ev_angle, n_ev
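# Aside: the phase-shift step at the heart of prep_orient(), isolated: the
# imaginary part of the analytic signal is the input shifted by 90 deg,
# which is what makes hilbert(R) comparable with Z for Rayleigh waves.
# A synthetic single-frequency sketch.
import numpy as num
from pyrocko import trace

deltat = 0.5
y = num.sin(2. * num.pi * 0.05 * num.arange(600) * deltat)
y_hilb = num.imag(trace.hilbert(y, len(y)))

tr_orig = trace.Trace(ydata=y, deltat=deltat, tmin=0.)
tr_shift = trace.Trace(ydata=y_hilb, deltat=deltat, tmin=0.)
# tr_shift lags tr_orig by a quarter period of the 0.05 Hz sinusoid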
def optparse(
        required=(),
        optional=(),
        args=sys.argv,
        usage='%prog [options]',
        descr=None):

    want = required + optional

    parser = OptionParser(
        prog='cake',
        usage=usage,
        description=descr.capitalize() + '.',
        add_help_option=False,
        formatter=util.BetterHelpFormatter())

    parser.add_option(
        '-h', '--help', action='help', help='Show help message and exit.')

    if 'phases' in want:
        group = OptionGroup(parser, 'Phases', '''
Seismic phase arrivals may be either specified as traditional phase names
(e.g. P, S, PP, PcP, ...) or in Cake's own syntax which is more powerful.
Use the --classic option for traditional phase names. Use the --phase option
if you want to define phases in Cake's syntax.
''')
        group.add_option(
            '--phase', '--phases', dest='phases', action='append',
            default=[], metavar='PHASE1,PHASE2,...',
            help='''Comma separated list of seismic phases in Cake\'s syntax.

The definition of a seismic propagation path in Cake's phase syntax is a
string consisting of an alternating sequence of "legs" and "knees".

A "leg" represents seismic wave propagation without any conversions,
encountering only super-critical reflections. Legs are denoted by "P", "p",
"S", or "s". The capital letters are used when the take-off of the "leg" is
in downward direction, while the lower case letters indicate a take-off in
upward direction.

A "knee" is an interaction with an interface. It can be a mode conversion, a
reflection, or propagation as a headwave or diffracted wave.

    * conversion is simply denoted as: "(INTERFACE)" or "DEPTH"
    * upperside reflection: "v(INTERFACE)" or "vDEPTH"
    * underside reflection: "^(INTERFACE)" or "^DEPTH"
    * normal kind headwave or diffracted wave: "v_(INTERFACE)" or "v_DEPTH"

The interface may be given by name or by depth: INTERFACE is the name of an
interface defined in the model, DEPTH is the depth of an interface in [km]
(the interface closest to that depth is chosen). If two legs appear
consecutively without an explicit "knee", surface interaction is assumed.

The preferred standard interface names in cake are "conrad", "moho", "cmb"
(core-mantle boundary), and "cb" (inner core boundary).

The phase definition may end with a backslash "\\", to indicate that the ray
should arrive at the receiver from above instead of from below. It is
possible to restrict the maximum and minimum depth of a "leg" by appending
"<(INTERFACE)" or "<DEPTH" or ">(INTERFACE)" or ">DEPTH" after the leg
character, respectively.

When plotting rays or travel-time curves, the color can be set by appending
"{COLOR}" to the phase definition, where COLOR is the name of a color or an
RGB or RGBA color tuple in the format "R/G/B" or "R/G/B/A", respectively.
The values can be normalized to the range [0, 1] or to [0, 255]. The latter
is only assumed when any of the values given exceeds 1.0.
''')
        group.add_option(
            '--classic', dest='classic_phases', action='append',
            default=[], metavar='PHASE1,PHASE2,...',
            help='''Comma separated list of seismic phases in classic
nomenclature. Run "cake list-phase-map" for a list of available phase names.
When plotting, color can be specified in the same way as in --phases.''')

        parser.add_option_group(group)

    if 'model' in want:
        group = OptionGroup(parser, 'Model')
        group.add_option(
            '--model', dest='model_filename', metavar='(NAME or FILENAME)',
            help='Use builtin model named NAME or user model from file '
                 'FILENAME. By default, the "ak135-f-continental.m" model is '
                 'used. Run "cake list-models" for a list of builtin models.')
        group.add_option(
            '--format', dest='model_format', metavar='FORMAT',
            choices=['nd', 'hyposat'], default='nd',
            help='Set model file format (available: nd, hyposat; default: '
                 'nd).')
        group.add_option(
            '--crust2loc', dest='crust2loc', metavar='LAT,LON',
            help='Set model from CRUST2.0 profile at location (LAT,LON).')
        group.add_option(
            '--crust2profile', dest='crust2profile', metavar='KEY',
            help='Set model from CRUST2.0 profile with given KEY.')

        parser.add_option_group(group)

    if any(x in want for x in (
            'zstart', 'zstop', 'distances', 'sloc', 'rloc')):

        group = OptionGroup(parser, 'Source-receiver geometry')
        if 'zstart' in want:
            group.add_option(
                '--sdepth', dest='sdepth', type='float', default=0.0,
                metavar='FLOAT',
                help='Source depth [km] (default: 0)')

        if 'zstop' in want:
            group.add_option(
                '--rdepth', dest='rdepth', type='float', default=0.0,
                metavar='FLOAT',
                help='Receiver depth [km] (default: 0)')

        if 'distances' in want:
            group.add_option(
                '--distances', dest='sdist', metavar='DISTANCES',
                help='Surface distances as "start:stop:n" or '
                     '"dist1,dist2,..." [km]')
            group.add_option(
                '--sloc', dest='sloc', metavar='LAT,LON',
                help='Source location (LAT,LON).')
            group.add_option(
                '--rloc', dest='rloc', metavar='LAT,LON',
                help='Receiver location (LAT,LON).')

        parser.add_option_group(group)

    if 'material' in want:
        group = OptionGroup(
            parser, 'Material',
            'An isotropic elastic material may be specified by giving '
            'a combination of some of the following options.')
        group.add_option(
            '--vp', dest='vp', default=None, type='float', metavar='FLOAT',
            help='P-wave velocity [km/s]')
        group.add_option(
            '--vs', dest='vs', default=None, type='float', metavar='FLOAT',
            help='S-wave velocity [km/s]')
        group.add_option(
            '--rho', dest='rho', default=None, type='float', metavar='FLOAT',
            help='Density [g/cm**3]')
        group.add_option(
            '--qp', dest='qp', default=None, type='float', metavar='FLOAT',
            help='P-wave attenuation Qp (default: 1456)')
        group.add_option(
            '--qs', dest='qs', default=None, type='float', metavar='FLOAT',
            help='S-wave attenuation Qs (default: 600)')
        group.add_option(
            '--poisson', dest='poisson', default=None, type='float',
            metavar='FLOAT',
            help='Poisson ratio')
        group.add_option(
            '--lambda', dest='lame_lambda', default=None, type='float',
            metavar='FLOAT',
            help='Lame parameter lambda [GPa]')
        group.add_option(
            '--mu', dest='lame_mu', default=None, type='float',
            metavar='FLOAT',
            help='Shear modulus [GPa]')
        group.add_option(
            '--qk', dest='qk', default=None, type='float', metavar='FLOAT',
            help='Bulk attenuation Qk')
        group.add_option(
            '--qmu', dest='qmu', default=None, type='float', metavar='FLOAT',
            help='Shear attenuation Qmu')

        parser.add_option_group(group)

    if any(x in want for x in (
            'vred', 'as_degrees', 'accuracy', 'slowness', 'interface',
            'aspect', 'shade_model')):

        group = OptionGroup(parser, 'General')
        if 'vred' in want:
            group.add_option(
                '--vred', dest='vred', type='float', metavar='FLOAT',
                help='Velocity for time reduction in plot [km/s]')

        if 'as_degrees' in want:
            group.add_option(
                '--degrees', dest='as_degrees', action='store_true',
                default=False,
                help='Distances are in [deg] instead of [km], velocities in '
                     '[deg/s] instead of [km/s], slownesses in [s/deg] '
                     'instead of [s/km].')

        if 'accuracy' in want:
            group.add_option(
                '--accuracy', dest='accuracy', type='float',
                metavar='MAXIMUM_RELATIVE_RMS', default=0.002,
                help='Set accuracy for model simplification.')

        if 'slowness' in want:
            group.add_option(
                '--slowness', dest='slowness', type='float', metavar='FLOAT',
                default=0.0,
                help='Select surface slowness [s/km] (default: 0)')

        if 'interface' in want:
            group.add_option(
                '--interface', dest='interface', metavar='(NAME or DEPTH)',
                help='Name or depth [km] of interface to select')

        if 'aspect' in want:
            group.add_option(
                '--aspect', dest='aspect', type='float', metavar='FLOAT',
                help='Aspect ratio for plot')

        if 'shade_model' in want:
            group.add_option(
                '--no-shade-model', dest='shade_model',
                action='store_false', default=True,
                help='Suppress shading of earth model layers')

        parser.add_option_group(group)

    if any(x in want for x in ('output_format',)):
        group = OptionGroup(parser, 'Output')
        if 'output_format' in want:
            group.add_option(
                '--output-format', dest='output_format', metavar='FORMAT',
                default='textual',
                choices=('textual', 'nd'),
                help='Set model output format (available: textual, nd, '
                     'default: textual)')

        parser.add_option_group(group)

    if usage == 'cake help-options':
        parser.print_help()

    (options, args) = parser.parse_args(args)

    if len(args) != 2:
        parser.error(
            'Cake arguments should look like "--option" or "--option=...".')

    d = {}
    as_degrees = False
    if 'as_degrees' in want:
        as_degrees = options.as_degrees
        d['as_degrees'] = as_degrees

    if 'accuracy' in want:
        d['accuracy'] = options.accuracy

    if 'output_format' in want:
        d['output_format'] = options.output_format

    if 'aspect' in want:
        d['aspect'] = options.aspect

    if 'shade_model' in want:
        d['shade_model'] = options.shade_model

    if 'phases' in want:
        phases = []
        phase_colors = {}
        try:
            for ss in options.phases:
                for s in ss.split(','):
                    s = process_color(s, phase_colors)
                    phases.append(cake.PhaseDef(s))

            for pp in options.classic_phases:
                for p in pp.split(','):
                    p = process_color(p, phase_colors)
                    phases.extend(cake.PhaseDef.classic(p))

        except (cake.PhaseDefParseError, cake.UnknownClassicPhase) as e:
            parser.error(e)

        if not phases and 'phases' in required:
            s = process_color('P', phase_colors)
            phases.append(cake.PhaseDef(s))

        if phases:
            d['phase_colors'] = phase_colors
            d['phases'] = phases

    if 'model' in want:
        if options.model_filename:
            d['model'] = cake.load_model(
                options.model_filename, options.model_format)

        if options.crust2loc or options.crust2profile:
            if options.crust2loc:
                try:
                    args = tuple(
                        [float(x) for x in options.crust2loc.split(',')])
                except Exception:
                    parser.error(
                        'format for --crust2loc option is '
                        '"LATITUDE,LONGITUDE"')
            elif options.crust2profile:
                args = (options.crust2profile.upper(),)
            else:
                assert False

            if 'model' in d:
                d['model'] = d['model'].replaced_crust(args)
            else:
                from pyrocko import crust2x2
                profile = crust2x2.get_profile(*args)
                d['model'] = cake.LayeredModel.from_scanlines(
                    cake.from_crust2x2_profile(profile))

    if 'vred' in want:
        d['vred'] = options.vred
        if d['vred'] is not None:
            if not as_degrees:
                d['vred'] *= r2d * cake.km / cake.earthradius

    if 'distances' in want:
        distances = None
        if options.sdist:
            if options.sdist.find(':') != -1:
                ssn = options.sdist.split(':')
                if len(ssn) != 3:
                    parser.error(
                        'format for distances is '
                        '"min_distance:max_distance:n_distances"')

                distances = num.linspace(*map(float, ssn))
            else:
                distances = num.array(
                    list(map(float, options.sdist.split(','))),
                    dtype=num.float)

            if not as_degrees:
                distances *= r2d * cake.km / cake.earthradius

        if options.sloc and options.rloc:
            try:
                slat, slon = tuple(
                    [float(x) for x in options.sloc.split(',')])
                rlat, rlon = tuple(
                    [float(x) for x in options.rloc.split(',')])
            except Exception:
                parser.error(
                    'format for --sloc and --rloc options is '
                    '"LATITUDE,LONGITUDE"')

            distance_sr = orthodrome.distance_accurate50m_numpy(
                slat, slon, rlat, rlon)
            distance_sr *= r2d / cake.earthradius
            if distances is not None:
                distances = num.concatenate((distances, [distance_sr]))
            else:
                distances = num.array([distance_sr], dtype=num.float)

        if distances is not None:
            d['distances'] = distances
        else:
            if 'distances' not in required:
                d['distances'] = None

    if 'slowness' in want:
        d['slowness'] = options.slowness / cake.d2r
        if not as_degrees:
            d['slowness'] /= cake.km * cake.m2d

    if 'interface' in want:
        if options.interface:
            try:
                d['interface'] = float(options.interface) * cake.km
            except ValueError:
                d['interface'] = options.interface
        else:
            d['interface'] = None

    if 'zstart' in want:
        d['zstart'] = options.sdepth * cake.km

    if 'zstop' in want:
        d['zstop'] = options.rdepth * cake.km

    if 'material' in want:
        md = {}
        userfactor = dict(
            vp=1000., vs=1000., rho=1000., qp=1., qs=1., qmu=1., qk=1.,
            lame_lambda=1.0e9, lame_mu=1.0e9, poisson=1.)

        for k in userfactor.keys():
            if getattr(options, k) is not None:
                md[k] = getattr(options, k) * userfactor[k]

        if not (bool('lame_lambda' in md) == bool('lame_mu' in md)):
            parser.error('lambda and mu must both be specified.')

        if 'lame_lambda' in md and 'lame_mu' in md:
            md['lame'] = md.pop('lame_lambda'), md.pop('lame_mu')

        if md:
            try:
                d['material'] = cake.Material(**md)
            except cake.InvalidArguments as e:
                parser.error(str(e))

    for k in list(d.keys()):
        if k not in want:
            del d[k]

    for k in required:
        if k not in d:
            if k == 'model':
                d['model'] = cake.load_model('ak135-f-continental.m')
            elif k == 'distances':
                d['distances'] = num.linspace(10*cake.km, 100*cake.km, 10) \
                    / cake.earthradius * r2d
            elif k == 'phases':
                d['phases'] = list(map(cake.PhaseDef, 'Pp'))
            else:
                parser.error('missing %s' % k)

    return Anon(d)
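# Illustrative use of optparse() as a cake subcommand might call it. This
# assumes the surrounding cake script context (Anon exposing the dict
# entries as attributes, process_color(), r2d, ...); the argument vector
# below is made up. After option stripping, exactly two positionals must
# remain (program and subcommand name), hence the two leading entries.
conf = optparse(
    required=('model', 'phases', 'distances'),
    optional=('as_degrees',),
    args=['cake', 'arrivals', '--phases=P,S', '--distances=100,500'],
    usage='cake arrivals [options]',
    descr='print arrival times')

print(conf.phases, conf.distances)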
def update_distances_and_angles(
        self, indices=None, want_angles=False, want_distances=False):

    '''Calculate and update distances and Kagan angles between events.

    :param indices: list of lists of indices (optional)

    Ideally, indices are consecutive for best performance.
    '''
    want_angles = want_angles or \
        not self.marker_table_view.isColumnHidden(
            _column_mapping['Kagan Angle [deg]'])
    want_distances = want_distances or \
        not self.marker_table_view.isColumnHidden(
            _column_mapping['Dist [km]'])

    if not (want_distances or want_angles):
        return

    indices = indices or [[]]
    indices = [i for ii in indices for i in ii]
    if len(indices) != 1:
        return

    if self.last_active_event == self.pile_viewer.get_active_event():
        return
    else:
        self.last_active_event = self.pile_viewer.get_active_event()

    markers = self.pile_viewer.markers
    nmarkers = len(markers)
    omarker = markers[indices[0]]
    if not isinstance(omarker, EventMarker):
        return
    else:
        oevent = omarker.get_event()

    emarkers = [m for m in markers if isinstance(m, EventMarker)]
    if len(emarkers) < 2:
        return
    else:
        events = [em.get_event() for em in emarkers]

    nevents = len(events)

    if want_distances:
        lats = num.zeros(nevents)
        lons = num.zeros(nevents)
        for i in range(nevents):
            lats[i] = events[i].lat
            lons[i] = events[i].lon

        # Tile the reference event's coordinates to match the array API's
        # same-length inputs, then compute all distances in one call.
        olats = num.zeros(nevents)
        olons = num.zeros(nevents)
        olats[:] = oevent.lat
        olons[:] = oevent.lon
        dists = orthodrome.distance_accurate50m_numpy(
            lats, lons, olats, olons)
        dists /= 1000.
        dists = [round(x, 1) for x in dists]
        self.distances = dict(list(zip(emarkers, dists)))

    if want_angles:
        if oevent.moment_tensor:
            for em in emarkers:
                e = em.get_event()
                if e.moment_tensor:
                    a = kagan_angle(oevent.moment_tensor, e.moment_tensor)
                    self.kagan_angles[em] = a
        else:
            self.kagan_angles = {}

    istart = self.index(0, _column_mapping['Dist [km]'])
    istop = self.index(nmarkers - 1, _column_mapping['Kagan Angle [deg]'])

    self.dataChanged.emit(istart, istop)
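# Standalone sketch of the one-to-many distance pattern used above: the
# array API expects same-length coordinate arrays, so the reference
# coordinates are tiled to the length of the event arrays. The event
# coordinates below are made up.
import numpy as num
from pyrocko import orthodrome

lats = num.array([46.1, 47.3, 45.2])
lons = num.array([6.0, 8.5, 7.1])

olat, olon = 46.5, 7.0  # reference event
olats = num.full(lats.size, olat)
olons = num.full(lons.size, olon)

dists_km = orthodrome.distance_accurate50m_numpy(
    lats, lons, olats, olons) / 1000.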
def plot_gnss(gnss_target, result, ifig, vertical=False):
    # Note: in the original code this function is nested inside a plotter
    # method; `self`, `source` and `problem` come from the enclosing scope.
    campaign = gnss_target.campaign
    item = PlotItem(
        name='fig_%i' % ifig,
        attributes={'targets': gnss_target.path},
        title=u'Static GNSS Surface Displacements - Campaign %s'
              % campaign.name,
        description=u'''
Static surface displacement from GNSS campaign %s (black vectors) and
displacements derived from best model (red).
''' % campaign.name)

    event = source.pyrocko_event()
    locations = campaign.stations + [event]

    lat, lon = od.geographic_midpoint_locations(locations)

    radius = self.radius
    if radius is None:
        # Auto-radius: largest midpoint-to-location distance, padded 10%.
        coords = num.array([loc.effective_latlon for loc in locations])
        radius = od.distance_accurate50m_numpy(
            lat[num.newaxis], lon[num.newaxis],
            coords[:, 0], coords[:, 1]).max()
        radius *= 1.1

    if radius < 30. * km:
        logger.warn(
            'Radius of GNSS campaign %s too small, defaulting to 30 km'
            % campaign.name)
        radius = 30 * km

    model_camp = gnss.GNSSCampaign(
        stations=copy.deepcopy(campaign.stations),
        name='grond model')

    for ista, sta in enumerate(model_camp.stations):
        sta.north.shift = result.statics_syn['displacement.n'][ista]
        sta.north.sigma = 0.
        sta.east.shift = result.statics_syn['displacement.e'][ista]
        sta.east.sigma = 0.
        if sta.up:
            sta.up.shift = -result.statics_syn['displacement.d'][ista]
            sta.up.sigma = 0.

    m = automap.Map(
        width=self.size_cm[0],
        height=self.size_cm[1],
        lat=lat,
        lon=lon,
        radius=radius,
        show_topo=self.show_topo,
        show_grid=self.show_grid,
        show_rivers=self.show_rivers,
        color_wet=(216, 242, 254),
        color_dry=(238, 236, 230))

    all_stations = campaign.stations + model_camp.stations
    offset_scale = num.zeros(len(all_stations))

    for ista, sta in enumerate(all_stations):
        for comp in sta.components.values():
            offset_scale[ista] += comp.shift

    offset_scale = num.sqrt(offset_scale**2).max()

    m.add_gnss_campaign(
        campaign,
        psxy_style={
            'G': 'black',
            'W': '0.8p,black',
        },
        offset_scale=offset_scale,
        vertical=vertical)

    m.add_gnss_campaign(
        model_camp,
        psxy_style={
            'G': 'red',
            'W': '0.8p,red',
            't': 30,
        },
        offset_scale=offset_scale,
        vertical=vertical,
        labels=False)

    if isinstance(problem, CMTProblem) or isinstance(problem, VLVDProblem):
        from pyrocko import moment_tensor
        from pyrocko.plot import gmtpy

        mt = event.moment_tensor.m_up_south_east()
        ev_lat, ev_lon = event.effective_latlon

        xx = num.trace(mt) / 3.
        mc = num.matrix([[xx, 0., 0.], [0., xx, 0.], [0., 0., xx]])
        mc = mt - mc
        mc = mc / event.moment_tensor.scalar_moment() * \
            moment_tensor.magnitude_to_moment(5.0)
        m6 = tuple(moment_tensor.to6(mc))
        symbol_size = 20.
        m.gmt.psmeca(
            S='%s%g' % ('d', symbol_size / gmtpy.cm),
            in_rows=[(ev_lon, ev_lat, 10) + m6 + (1, 0, 0)],
            M=True,
            *m.jxyr)

    elif isinstance(problem, RectangularProblem):
        m.gmt.psxy(
            in_rows=source.outline(cs='lonlat'),
            L='+p2p,black',
            W='1p,black',
            G='black',
            t=60,
            *m.jxyr)

    elif isinstance(problem, VolumePointProblem):
        ev_lat, ev_lon = event.effective_latlon
        dV = abs(source.volume_change)
        sphere_radius = num.cbrt(dV / (4. / 3. * num.pi))

        volcanic_circle = [ev_lon, ev_lat, '%fe' % sphere_radius]
        m.gmt.psxy(
            S='E-',
            in_rows=[volcanic_circle],
            W='1p,black',
            G='orange3',
            *m.jxyr)

    return (item, m)
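# The psmeca branch above normalizes the tensor before plotting: strip the
# isotropic part, then rescale the deviatoric remainder to a fixed reference
# magnitude so all beachballs render at comparable size. Standalone sketch
# with a made-up tensor; scalar moment is taken here as the Frobenius norm
# over sqrt(2), an assumption, not necessarily how
# event.moment_tensor.scalar_moment() is defined.
import numpy as num
from pyrocko import moment_tensor

m = num.array([
    [1.5e17, 0.2e17, 0.0],
    [0.2e17, -0.5e17, 0.1e17],
    [0.0, 0.1e17, -1.0e17]])  # up-south-east [Nm], made up

m_dev = m - num.trace(m) / 3. * num.eye(3)  # remove isotropic part
m0 = num.sqrt(num.sum(m_dev**2) / 2.)       # Frobenius-norm scalar moment
m_plot = m_dev / m0 * moment_tensor.magnitude_to_moment(5.0)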
def call(self):
    try:
        global vtk
        import vtk

        import sys
        sys.path[0:0] = [self.module_dir()]
        from grid_topo import setup_vtk_map_actor
        sys.path[0:1] = []

    except ImportError as _import_error:
        self.fail('\nImportError:\n%s' % _import_error)
        vtk = None

    self.cleanup()
    viewer = self.get_viewer()

    stations = []
    events = []
    cone_actors = []
    sphere_actors = []

    if self.want_stations:
        stations = self.get_stations()

    if self.want_events:
        markers = self.get_selected_event_markers()
        if len(markers) == 0:
            tmin, tmax = self.get_selected_time_range(fallback=True)
            markers = filter(
                lambda x: tmin < x.tmin < tmax,
                self.get_event_markers())

        events = [m.get_event() for m in markers]
        active_event = viewer.get_active_event()

        if events:  # guard: min()/max() below fail on an empty list
            to_rgba = ColorMapper('summer')
            times = [e.time for e in events]
            to_rgba.set_range(min(times), max(times))
            if active_event:
                to_rgba = ColorMapper('gray')
                to_rgba.set_range(min(times), max(times))

            for e in events:
                e.get_vtk_color = to_rgba

        if active_event:
            def return_red(e):
                return (1., 0., 0., 1.)

            active_event.get_vtk_color = return_red
            events.append(active_event)

    all_lats = []
    all_lons = []
    for s in stations:
        all_lats.append(s.lat)
        all_lons.append(s.lon)

    for e in events:
        all_lats.append(e.lat)
        all_lons.append(e.lon)

    center_lat, center_lon = ortho.geographic_midpoint(
        num.array(all_lats), num.array(all_lons))

    center_lats = num.array([center_lat] * len(all_lats))
    center_lons = num.array([center_lon] * len(all_lons))

    distances = ortho.distance_accurate50m_numpy(
        num.array(all_lats), num.array(all_lons),
        center_lats, center_lons)

    distance_max = num.max(distances)
    size = distance_max / 50.

    if len(events) != 0:
        sphere_actors = events_to_vtksphere_actors(
            events, z_scale=self.z_scale, size=size / 2.)

    if len(stations) != 0:
        ns, es, depths = locations_to_ned(
            stations, z_scale=self.z_scale, has_elevation=True)
        adata = num.array((es, ns, -depths)).flatten(order='F')
        data = numpy_support.numpy_to_vtk(
            adata, deep=True, array_type=vtk.VTK_FLOAT)
        data.SetNumberOfComponents(3)
        cone_actors = self.stations_to_vtkcone_actors(data, size=size)

    if self.want_topo:
        distance_max += self.margin_radius * 1000.
        topo_actor = setup_vtk_map_actor(
            center_lat, center_lon, distance_max,
            super_elevation=self.z_scale,
            decimation=int(self.z_decimation or 1),
            smoothing=self.smoothing)

    self.frame = self.vtk_frame()

    for actor in cone_actors:
        actor.GetProperty().SetColor(0., 0., 1.)
        self.frame.add_actor(actor)

    for actor in sphere_actors:
        self.frame.add_actor(actor)

    if self.want_topo:
        self.frame.add_actor(topo_actor)

    self.frame.renderer.SetBackground(0.01, 0.05, 0.1)
    self.frame.init()
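# Why flatten(order='F') above: stacking (es, ns, -depths) gives a
# (3, nstations) array, and Fortran order walks it column by column,
# yielding the interleaved (x0, y0, z0, x1, y1, z1, ...) layout that
# numpy_to_vtk expects for a 3-component point array. Tiny check with
# made-up coordinates:
import numpy as num

es = num.array([100., 200.])
ns = num.array([10., 20.])
depths = num.array([-5., -8.])

adata = num.array((es, ns, -depths)).flatten(order='F')
assert list(adata) == [100., 10., 5., 200., 20., 8.]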