def get_corner_coordinates(self):
    """Return the four footprint corners as ((ll), (ul), (ur), (lr)),
    each a (lat, lon) pair.

    Two corners come from the anchor points; the two skewed corners are
    computed from the scene center using the track inclination.
    """
    inc = self.inclination

    # Anchor corners (partially overwritten for descending orbits).
    llLat, llLon = self.get_ll_anchor()
    urLat, urLon = self.get_ur_anchor()

    if self.orbital_node == 'Ascending':
        ulLat, ulLon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            self.track_length/2,
            -num.tan(inc*d2r) * self.width/2)
        lrLat, lrLon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            -self.track_length/2,
            num.tan(inc*d2r) * self.width/2)

    elif self.orbital_node == 'Descending':
        urLat, urLon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            self.track_length/2,
            num.tan(inc*d2r) * self.width/2)
        llLat, llLon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            -self.track_length/2,
            -num.tan(inc*d2r) * self.width/2)
        # NOTE(review): this branch never assigns ulLat/ulLon and
        # lrLat/lrLon, so the return below raises NameError for
        # descending scenes -- they presumably should be taken from
        # the anchors here; confirm intended geometry.

    return ((llLat, llLon), (ulLat, ulLon),
            (urLat, urLon), (lrLat, lrLon))
def test_midpoint(self):
    """The geographic midpoint of a random point cloud should fall
    close to the cloud's true center, including a near-pole case."""
    center_lons = num.linspace(0., 180., 5)
    center_lats = [0., 89.]          # include a near-pole latitude
    npoints = 10000
    half_side_length = 1000000.      # 1000 km half-width of the cloud
    distance_error_max = 50000.      # accept up to 50 km deviation
    for lat in center_lats:
        for lon in center_lons:
            n = num.random.uniform(
                -half_side_length, half_side_length, npoints)
            e = num.random.uniform(
                -half_side_length, half_side_length, npoints)
            dlats, dlons = orthodrome.ne_to_latlon(lat, lon, n, e)
            clat, clon = orthodrome.geographic_midpoint(dlats, dlons)
            # Distance between computed midpoint and true center.
            d = orthodrome.distance_accurate50m_numpy(
                clat, clon, lat, lon)[0]

            if plot:  # module-level debug flag -- presumably; confirm
                import matplotlib.pyplot as plt
                fig = plt.figure()
                ax = fig.add_subplot(111)
                ax.scatter(n, e)
                c_n, c_e = orthodrome.latlon_to_ne_numpy(
                    lat, lon, clat, clon)
                ax.plot(c_n, c_e, 'ro')
                plt.show()

            self.assertTrue(
                d < distance_error_max,
                'Distance %s > %s' % (d, distance_error_max)
                + '(maximum error)\n tested lat/lon: %s/%s' % (lat, lon))
def testGridDistances(self):
    """Great-circle distances of projected grid nodes back to the
    origin must agree with their planar N/E distances."""
    for _ in range(100):
        w, h = 20, 15
        km = 1000.
        gsize = random.uniform(0., 1.) * 2. * 10.**random.uniform(4., 7.)

        edge = num.linspace(-gsize/2., gsize/2., 11)
        ngrid, egrid = num.meshgrid(edge, edge)
        ngrid = ngrid.flatten()
        egrid = egrid.flatten()

        lat_delta = gsize/config.earthradius*r2d*2.
        lon = random.uniform(-180., 180.)
        lat = random.uniform(-90., 90.)

        glats, glons = orthodrome.ne_to_latlon(lat, lon, ngrid, egrid)
        glats_alt, glons_alt = orthodrome.ne_to_latlon_alternative_method(
            lat, lon, ngrid, egrid)

        for glat, glon, gnorth, geast in zip(glats, glons, ngrid, egrid):
            point = Loc()
            point.lat = glat
            point.lon = glon
            origin = Loc()
            origin.lat = lat
            origin.lon = lon

            cd = orthodrome.cosdelta(point, origin)
            assert cd <= 1.0

            dist_sphere = num.arccos(cd) * config.earthradius
            dist_plane = math.sqrt(gnorth**2 + geast**2)
            # Mismatch beyond 1 mm is only significant above 1 m.
            assert abs(dist_sphere - dist_plane) <= 1.0e-3 \
                or dist_plane <= 1.
def testProjectionsZOnly(self):
    """A single vertical (BHZ) trace must project unchanged onto the
    up ('U') component, for stations at many azimuths."""
    km = 1000.
    ev = model.Event(lat=-10, lon=150., depth=0.0)
    for azi in num.linspace(0., 360., 37):
        # NOTE(review): azi is in degrees but fed to math.cos/sin which
        # expect radians -- the stations end up scattered rather than on
        # a circle, which is harmless here; confirm intended.
        lat, lon = orthodrome.ne_to_latlon(
            ev.lat, ev.lon,
            10.*km * math.cos(azi),
            10.*km * math.sin(azi))
        sta = model.Station(lat=lat, lon=lon)
        sta.set_event_relative_data(ev)
        sta.set_channels_by_name('BHZ', 'BHN', 'BHE')
        traces = [
            trace.Trace(channel='BHZ', ydata=num.array([1.0])),
        ]
        projected = []
        for m, in_channels, out_channels in sta.guess_projections_to_enu():
            projected.extend(trace.project(
                traces, m, in_channels, out_channels))

        def g(traces, cha):
            # Return the first trace matching the channel name.
            for tr in traces:
                if tr.channel == cha:
                    return tr

        z = g(projected, 'U')
        assert(near(z.ydata[0], 1.0, 0.001))
def testGridDistances(self):
    """Compare great-circle distances of projected grid nodes with
    their planar N/E distances, printing any significant mismatch.

    Fix: the original used Python 2 ``print`` statements, which are a
    SyntaxError under Python 3 and inconsistent with the sibling test
    blocks; converted to ``print()`` calls, behavior preserved.
    """
    for i in range(100):
        w, h = 20, 15
        km = 1000.0
        gsize = random.uniform(0.0, 1.0) * 2.0 \
            * 10.0 ** random.uniform(4.0, 7.0)
        north_grid, east_grid = num.meshgrid(
            num.linspace(-gsize / 2.0, gsize / 2.0, 11),
            num.linspace(-gsize / 2.0, gsize / 2.0, 11))

        north_grid = north_grid.flatten()
        east_grid = east_grid.flatten()

        lat_delta = gsize / config.earthradius * r2d * 2.0
        lon = random.uniform(-180.0, 180.0)
        lat = random.uniform(-90.0, 90.0)

        lat_grid, lon_grid = orthodrome.ne_to_latlon(
            lat, lon, north_grid, east_grid)
        lat_grid_alt, lon_grid_alt = \
            orthodrome.ne_to_latlon_alternative_method(
                lat, lon, north_grid, east_grid)

        for la, lo, no, ea in zip(lat_grid, lon_grid,
                                  north_grid, east_grid):
            a = Loc()
            a.lat = la
            a.lon = lo
            b = Loc()
            b.lat = lat
            b.lon = lon

            d = num.arccos(orthodrome.cosdelta(a, b)) * config.earthradius
            d2 = math.sqrt(no ** 2 + ea ** 2)
            if abs(d - d2) > 1.0e-3 and d2 > 1.0:
                print("x", a.lat, a.lon, b.lat, b.lon)
                print("y", d, d2, d - d2)
def testProjections(self):
    """E/N data synthesized from unit radial and transverse motion
    must project back to unit R and T amplitudes."""
    km = 1000.
    ev = model.Event(lat=-10, lon=150., depth=0.0)
    for azi in num.linspace(0., 360., 37):
        lat, lon = orthodrome.ne_to_latlon(
            ev.lat, ev.lon,
            10.*km * math.cos(azi),
            10.*km * math.sin(azi))
        sta = model.Station(lat=lat, lon=lon)
        sta.set_event_relative_data(ev)
        sta.set_channels_by_name('BHZ', 'BHE', 'BHN')
        r = 1.
        t = 1.
        # Rotate unit (R, T) into (E, N) for this azimuth.
        traces = [
            trace.Trace(
                channel='BHE',
                ydata=num.array([math.sin(azi)*r+math.cos(azi)*t])),
            trace.Trace(
                channel='BHN',
                ydata=num.array([math.cos(azi)*r-math.sin(azi)*t])),
        ]
        for m, in_channels, out_channels in sta.guess_projections_to_rtu():
            projected = trace.project(traces, m, in_channels, out_channels)

        def g(traces, cha):
            # Return the first trace matching the channel name.
            for tr in traces:
                if tr.channel == cha:
                    return tr

        r = g(projected, 'R')
        t = g(projected, 'T')
        assert( near(r.ydata[0], 1.0, 0.001) )
        assert( near(t.ydata[0], 1.0, 0.001) )
def effective_latlons(self):
    """Return offset-corrected (lats, lons) for all points, cached in
    ``self._latlons``."""
    if self._latlons is None:
        if self.lats is not None and self.lons is not None:
            if (self.north_shifts is not None
                    and self.east_shifts is not None):
                # Per-point coordinates with per-point N/E shifts.
                self._latlons = orthodrome.ne_to_latlon(
                    self.lats, self.lons,
                    self.north_shifts, self.east_shifts)
            else:
                self._latlons = self.lats, self.lons
        else:
            # Scalar origin; g() presumably substitutes a default for
            # None -- confirm against its definition.
            lat = g(self.lat, 0.0)
            lon = g(self.lon, 0.0)
            self._latlons = orthodrome.ne_to_latlon(
                lat, lon, self.north_shifts, self.east_shifts)

    return self._latlons
def add_gnss_campaign(self, campaign, psxy_style=dict(), offset_scale=None,
                      labels=True, vertical=False):
    """Plot GNSS campaign offsets with GMT ``psvelo``.

    :param campaign: campaign whose ``stations`` carry shift/sigma data.
    :param psxy_style: overrides merged into the default psvelo style.
    :param offset_scale: expected maximum offset; derived from the data
        when ``None``.
    :param labels: annotate stations with their codes.
    :param vertical: plot vertical (up) instead of horizontal offsets.

    Fixes: the two branches carried byte-identical default style dicts
    (hoisted to one definition); the label loop paired ``rows`` with
    ``campaign.stations`` positionally, which misassigned codes whenever
    stations were filtered out of ``rows`` -- labels are now appended
    from the same filtered selection the rows were built from.
    """
    if offset_scale is None:
        # Largest 3-D displacement magnitude sets the arrow scaling.
        offset_scale = num.array(
            [math.sqrt(s.east.shift**2 + s.north.shift**2 + s.up.shift**2)
             for s in campaign.stations]).max()

    size = math.sqrt(self.height**2 + self.width**2)
    scale = (size/50.) / offset_scale

    # Offset-corrected station coordinates.
    lats, lons = zip(
        *[od.ne_to_latlon(s.lat, s.lon, s.north_shift, s.east_shift)
          for s in campaign.stations])

    default_psxy_style = {
        'h': 0,
        'W': '0.5p,black',
        'G': 'black',
        'L': True,
        'S': 'e%ec/0.95/8' % scale,
    }

    if vertical:
        selected = [(ista, s)
                    for ista, s in enumerate(campaign.stations)
                    if s.up is not None]
        rows = [[lons[ista], lats[ista],
                 0., s.up.shift,
                 0., s.up.sigma,
                 0.]
                for ista, s in selected]
    else:
        selected = [(ista, s)
                    for ista, s in enumerate(campaign.stations)
                    if s.east is not None or s.north is not None]
        rows = [[lons[ista], lats[ista],
                 s.east.shift, s.north.shift,
                 s.east.sigma, s.north.sigma,
                 s.correlation_ne]
                for ista, s in selected]

    if labels:
        # rows and selected are index-aligned by construction.
        for row, (ista, sta) in zip(rows, selected):
            row.append(sta.code)

    default_psxy_style.update(psxy_style)

    self.gmt.psvelo(
        in_rows=rows,
        *self.jxyr,
        **default_psxy_style)
def extent(lon, lat, w, h, n):
    """Geographic region covered by a ``w`` x ``h`` (meters) box
    centered at (lat, lon), sampled with ``n`` points per edge.

    Returns the normalized (west, east, south, north) region.
    """
    xs = num.linspace(-0.5 * w, 0.5 * w, n)
    ys = num.linspace(-0.5 * h, 0.5 * h, n)

    # Latitude extremes from the southern and northern edges.
    edge_s_lats, edge_s_lons = od.ne_to_latlon(lat, lon, ys[0], xs)
    edge_n_lats, edge_n_lons = od.ne_to_latlon(lat, lon, ys[-1], xs)
    south = edge_s_lats.min()
    north = edge_n_lats.max()

    # Longitude extremes from the western and eastern edges.
    edge_w_lats, edge_w_lons = od.ne_to_latlon(lat, lon, ys, xs[0])
    edge_e_lats, edge_e_lons = od.ne_to_latlon(lat, lon, ys, xs[-1])

    # Unwrap eastern edge longitudes across the dateline.
    edge_e_lons = num.where(
        edge_e_lons < edge_w_lons, edge_e_lons + 360.0, edge_e_lons)

    wraps = (edge_e_lons.max() - edge_e_lons.min() > 180
             or edge_w_lons.max() - edge_w_lons.min() > 180.0)
    if wraps:
        west, east = -180.0, 180.0
    else:
        west, east = edge_w_lons.min(), edge_e_lons.max()

    return topo.positive_region((west, east, south, north))
def extent(lon, lat, w, h, n):
    """Geographic region covered by a ``w`` x ``h`` (meters) box
    centered at (lat, lon), sampled with ``n`` points per edge.

    Returns the normalized (west, east, south, north) region.
    """
    x = num.linspace(-0.5 * w, 0.5 * w, n)
    y = num.linspace(-0.5 * h, 0.5 * h, n)
    # Latitude extremes along the southern and northern edges.
    slats, slons = od.ne_to_latlon(lat, lon, y[0], x)
    nlats, nlons = od.ne_to_latlon(lat, lon, y[-1], x)
    south = slats.min()
    north = nlats.max()
    # Longitude extremes along the western and eastern edges.
    wlats, wlons = od.ne_to_latlon(lat, lon, y, x[0])
    elats, elons = od.ne_to_latlon(lat, lon, y, x[-1])
    # Unwrap the eastern edge across the dateline.
    elons = num.where(elons < wlons, elons + 360., elons)

    if elons.max() - elons.min() > 180 or wlons.max() - wlons.min() > 180.:
        # Region wraps (pole or dateline); fall back to full range.
        west = -180.
        east = 180.
    else:
        west = wlons.min()
        east = elons.max()

    return topo.positive_region((west, east, south, north))
def effective_latlon(self):
    '''
    Property holding the offset-corrected lat/lon pair of the location.
    '''
    if self._latlon is not None:
        return self._latlon

    if self.north_shift == 0.0 and self.east_shift == 0.0:
        # No offset: the nominal coordinates are already effective.
        self._latlon = self.lat, self.lon
    else:
        lat, lon = orthodrome.ne_to_latlon(
            self.lat, self.lon, self.north_shift, self.east_shift)
        # Plain floats, matching the uncorrected branch's types.
        self._latlon = (float(lat), float(lon))

    return self._latlon
def get_mask_water(self):
    """Return a cached mask over the scene grid, from GSHHG coastlines.

    NOTE(review): ``get_land_mask`` suggests the stored array marks
    land, though the attribute is named ``_mask_water`` -- confirm the
    intended polarity.
    """
    if self._mask_water is None:
        east_shifts, north_shifts = self.get_grid()
        # Recenter the grid on its midpoint. This mutates the arrays
        # returned by get_grid() in place -- assumes they are copies;
        # TODO confirm.
        east_shifts -= east_shifts[0, -1]/2
        north_shifts -= north_shifts[-1, -1]/2
        latlon = od.ne_to_latlon(self.lat_center, self.lon_center,
                                 north_shifts.ravel(), east_shifts.ravel())
        points = num.array(latlon).T
        self._mask_water = get_gsshg().get_land_mask(points)\
            .reshape(*east_shifts.shape)
    return self._mask_water
def get_mask_water(self):
    """Return a cached mask over the scene grid, derived from the
    GSHHG coastline database."""
    if self._mask_water is None:
        e_shifts, n_shifts = self.get_grid()

        # Recenter the grid on its midpoint (in place, as before).
        e_shifts -= e_shifts[0, -1] / 2
        n_shifts -= n_shifts[-1, -1] / 2

        grid_latlon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            n_shifts.ravel(), e_shifts.ravel())
        grid_points = num.array(grid_latlon).T

        mask = get_gsshg().get_land_mask(grid_points)
        self._mask_water = mask.reshape(*e_shifts.shape)

    return self._mask_water
def get_corner_coordinates(self):
    """Return the four footprint corners as ((ll), (ul), (ur), (lr)),
    each a (lat, lon) pair.

    Two corners come from the anchors; the skewed corners are computed
    from the scene center using the track inclination.
    """
    inc = self.inclination

    # Anchor corners (partially overwritten for descending orbits).
    llLat, llLon = self.get_ll_anchor()
    urLat, urLon = self.get_ur_anchor()

    if self.orbital_node == 'Ascending':
        ulLat, ulLon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            self.track_length / 2,
            -num.tan(inc * d2r) * self.width / 2)
        lrLat, lrLon = od.ne_to_latlon(self.lat_center, self.lon_center,
                                       -self.track_length / 2,
                                       num.tan(inc * d2r) * self.width / 2)

    elif self.orbital_node == 'Descending':
        urLat, urLon = od.ne_to_latlon(self.lat_center, self.lon_center,
                                       self.track_length / 2,
                                       num.tan(inc * d2r) * self.width / 2)
        llLat, llLon = od.ne_to_latlon(
            self.lat_center, self.lon_center,
            -self.track_length / 2,
            -num.tan(inc * d2r) * self.width / 2)
        # NOTE(review): ulLat/ulLon and lrLat/lrLon are never assigned
        # in this branch, so the return below raises NameError for
        # descending scenes -- they presumably should come from the
        # anchors; confirm intended geometry.

    return ((llLat, llLon), (ulLat, ulLon),
            (urLat, urLon), (lrLat, lrLon))
def add_gnss_campaign(self, campaign, psxy_style=dict(), offset_scale=None,
                      labels=True, vertical=False):
    """Plot GNSS campaign displacement vectors with GMT ``psvelo``.

    :param campaign: campaign whose ``stations`` carry shift/sigma data.
    :param psxy_style: overrides merged into the default psvelo style.
    :param offset_scale: expected maximum offset; derived from the
        data when ``None``.
    :param labels: annotate stations with their codes.
    :param vertical: plot vertical (up) instead of horizontal offsets.
    """
    if offset_scale is None:
        # Largest 3-D displacement magnitude sets the arrow scaling.
        offset_scale = num.array([
            math.sqrt(s.east.shift**2 + s.north.shift**2 + s.up.shift**2)
            for s in campaign.stations
        ]).max()

    size = math.sqrt(self.height**2 + self.width**2)
    scale = (size / 10.) / offset_scale

    default_psxy_style = {
        'h': 0,
        'W': '0.5p,black',
        'G': 'black',
        'L': True,
        # NOTE(review): %d truncates scale to an integer, so any
        # scale < 1 renders as 'e0c/...'; confirm intended.
        'S': 'e%dc/0.95/8' % scale,
    }
    default_psxy_style.update(psxy_style)

    # Offset-corrected station coordinates.
    lats, lons = zip(*[
        od.ne_to_latlon(s.lat, s.lon, s.north_shift, s.east_shift)
        for s in campaign.stations
    ])

    if vertical:
        # NOTE(review): the vertical rows reuse the east/north sigmas
        # and NE correlation rather than the up-component sigma --
        # verify against the psvelo input expected here.
        rows = [[
            lons[ista], lats[ista],
            0., s.up.shift,
            s.east.sigma, s.north.sigma, s.correlation_ne
        ] for ista, s in enumerate(campaign.stations)
            if s.up is not None]
    else:
        rows = [[
            lons[ista], lats[ista],
            s.east.shift, s.north.shift,
            s.east.sigma, s.north.sigma, s.correlation_ne
        ] for ista, s in enumerate(campaign.stations)]

    if labels:
        # NOTE(review): when stations were filtered out of `rows`
        # above (vertical case), zip() pairs rows and stations
        # positionally and misassigns the codes -- confirm.
        for row, sta in zip(rows, campaign.stations):
            if vertical and sta.up is None:
                continue
            row.append(sta.code)

    self.gmt.psvelo(in_rows=rows, *self.jxyr, **default_psxy_style)
def call(self):
    """Scan stacked detector ('SMAX') traces for peaks above the
    detection threshold and add an event marker per peak."""
    self.mycleanup()
    self.detections = []
    i_detection = 0
    # Defaults used when no index ('i') trace is available.
    zpeak = 0.
    lat = 0.
    lon = 0.
    for traces in self.chopper_selected_traces(
            mode='all',
            trace_selector=lambda x: x.station == "SMAX",
            fallback=True):
        # Stack-maximum traces (blank location) and index traces ('i').
        tr_smax = [tr for tr in traces if tr.location == '']
        tr_i = [tr for tr in traces if tr.location == 'i']
        if not tr_i:
            tr_i = [None] * len(tr_smax)

        # NOTE(review): the loop variable shadows the tr_i list, and
        # below the trace is *called* as tr_i(t) -- presumably an
        # interpolating __call__ on the trace; confirm.
        for tr_i, tr_stackmax in zip(tr_i, tr_smax):
            tpeaks, apeaks = tr_stackmax.peaks(self.detector_threshold,
                                               self.tsearch)
            if self.level_trace:
                # Visualize the detection threshold as a constant trace.
                ltrace = tr_stackmax.copy(data=False)
                ltrace.set_ydata(
                    num.ones(tr_stackmax.data_len()) *
                    self.detector_threshold)
                self.add_trace(ltrace)
            for t, a in zip(tpeaks, apeaks):
                if tr_i:
                    lat, lon, xpeak, ypeak, zpeak = \
                        self.grid.index_to_location(tr_i(t)[1])
                    lat, lon = orthodrome.ne_to_latlon(
                        lat, lon, xpeak, ypeak)

                e = model.Event(time=t, name="%s-%s" % (i_detection, a),
                                lat=lat, lon=lon, depth=zpeak)
                self.detections.append(
                    gui_util.EventMarker(
                        event=e, kind=int(self.marker_kind[0])))
                i_detection += 1
    self.add_markers(self.detections)

    if self.hold_figure:
        self.show_comparison()
def export_geojson(self, filename):
    """Export the quadtree leaves as a GeoJSON FeatureCollection of
    polygons carrying per-leaf statistics and LOS unit vectors.

    :param filename: output file path.
    """
    import geojson

    self._log.debug("Exporting GeoJSON Quadtree to %s", filename)
    features = []

    for lf in self.leaves:
        llN, llE, urN, urE = (lf.llN, lf.llE, lf.urN, lf.urE)
        if self.frame.isDegree():
            # Leaf offsets are degrees relative to the frame origin.
            llN += self.frame.llLat
            llE += self.frame.llLon
            urN += self.frame.llLat
            urE += self.frame.llLon
        # Closed ring: first vertex repeated at the end.
        coords = num.array(
            [(llN, llE), (llN, urE), (urN, urE), (urN, llE), (llN, llE)]
        )
        if self.frame.isMeter():
            # Meter offsets must be projected to geographic coordinates.
            coords = od.ne_to_latlon(self.frame.llLat, self.frame.llLon,
                                     *coords.T)
            coords = num.array(coords).T
        # GeoJSON expects (lon, lat) ordering.
        coords = coords[:, [1, 0]].tolist()
        feature = geojson.Feature(
            geometry=geojson.Polygon(coordinates=[coords]),
            id=lf.id,
            properties={
                # float() casts: numpy scalars are not JSON-serializable.
                "mean": float(lf.mean),
                "median": float(lf.median),
                "std": float(lf.std),
                "var": float(lf.var),
                "phi": float(lf.phi),
                "theta": float(lf.theta),
                "unitE": float(lf.unitE),
                "unitN": float(lf.unitN),
                "unitU": float(lf.unitU),
            },
        )
        features.append(feature)

    collection = geojson.FeatureCollection(features)
    with open(filename, "w") as f:
        geojson.dump(collection, f)
def add_stations(self, stations, psxy_style=dict()):
    """Draw station symbols (triangles) and ``network.station`` labels
    on the GMT map."""
    style = {'S': 't8p', 'G': 'black'}
    style.update(psxy_style)

    # Offset-corrected plotting coordinates.
    positions = [
        od.ne_to_latlon(s.lat, s.lon, s.north_shift, s.east_shift)
        for s in stations]
    lats, lons = zip(*positions)

    self.gmt.psxy(in_columns=(lons, lats), *self.jxyr, **style)

    for sta in stations:
        label = '.'.join(
            part for part in (sta.network, sta.station) if part)
        self.add_label(sta.lat, sta.lon, label)
def coordinates(self):
    """Local east and north coordinates of all pixels in ``Nx2`` matrix.

    :type: :class:`numpy.ndarray`, size ``Nx2``
    """
    coords = num.empty((self.rows * self.cols, 2))
    # Column 0: east values tiled over rows; column 1: north values.
    coords[:, 0] = num.repeat(self.E[num.newaxis, :], self.rows,
                              axis=0).flatten()
    coords[:, 1] = num.repeat(self.N[:, num.newaxis], self.cols,
                              axis=1).flatten()
    if self.isMeter():
        # NOTE(review): *coords.T passes the east column as the
        # ``north`` argument of ne_to_latlon (and north as east), and
        # the resulting columns are (lat, lon) while the degree branch
        # below yields (lon, lat) ordering -- verify both against the
        # frame's coordinate convention.
        coords = ne_to_latlon(self.llLat, self.llLon, *coords.T)
        coords = num.array(coords).T
    else:
        coords[:, 0] += self.llLon
        coords[:, 1] += self.llLat
    return coords
def add_stations(self, stations, psxy_style=dict()):
    """Plot station symbols (triangles) and labels on the GMT map.

    :param stations: station objects with lat/lon and N/E shifts.
    :param psxy_style: overrides merged into the default psxy style.
    """
    default_psxy_style = {
        'S': 't8p',        # triangle symbol, 8 point size
        'G': 'black'
    }
    default_psxy_style.update(psxy_style)

    # Offset-corrected plotting coordinates.
    lats, lons = zip(*[od.ne_to_latlon(
        s.lat, s.lon, s.north_shift, s.east_shift)
        for s in stations])

    self.gmt.psxy(
        in_columns=(lons, lats),
        *self.jxyr, **default_psxy_style)

    for station in stations:
        # NOTE(review): labels use the nominal lat/lon, not the
        # shifted plotting coordinates -- confirm intended.
        self.add_label(station.lat, station.lon, '.'.join(
            x for x in (station.network, station.station) if x))
def outline(self, cs='xyz'):
    """Return the rectangular source outline in coordinate system *cs*.

    :param cs: 'xyz' (N, E, depth), 'xy', 'latlon' or 'lonlat'.
    """
    points = outline_rect_source(self.strike, self.dip, self.length,
                                 self.width)

    # Shift the outline to the fault center; center() presumably
    # derives the offset from the width -- confirm its contract.
    center = self.center(self.width)
    points[:, 0] += center[0]
    points[:, 1] += center[1]
    points[:, 2] += center[2]
    if cs == 'xyz':
        return points
    elif cs == 'xy':
        return points[:, :2]
    elif cs in ('latlon', 'lonlat'):
        latlon = ne_to_latlon(self.lat, self.lon,
                              points[:, 0], points[:, 1])
        latlon = num.array(latlon).T
        if cs == 'latlon':
            return latlon
        else:
            # lonlat: swap columns.
            return latlon[:, ::-1]
    # Falls through (returns None) for an unknown cs.
def outline(self, cs='xyz'):
    """Return the source outline in the requested coordinate system.

    :param cs: 'xyz' (N, E, depth), 'xy', 'latlon' or 'lonlat'.
    """
    pts = gf.seismosizer.outline_rect_source(
        self.strike, self.dip, self.length, self.width, 'center')

    # Move the outline from source-local to absolute coordinates.
    pts[:, 0] += self.north_shift
    pts[:, 1] += self.east_shift
    pts[:, 2] += self.depth

    if cs == 'xyz':
        return pts

    if cs == 'xy':
        return pts[:, :2]

    if cs in ('latlon', 'lonlat'):
        geo = num.array(
            ne_to_latlon(self.lat, self.lon, pts[:, 0], pts[:, 1])).T
        return geo if cs == 'latlon' else geo[:, ::-1]
def detections_to_event_markers(fn_detections):
    """Parse a detections file into a list of EventMarker objects.

    Each whitespace-separated line holds: index, date, time, peak
    amplitude, lat, lon, x-, y-, z-offsets (meters).

    :param fn_detections: path to the detections file; a falsy value
        yields an empty list.

    Fix: iterate the file lazily instead of loading it fully with
    ``f.readlines()``, and unpack each line directly.
    """
    markers = []
    if fn_detections:
        with open(fn_detections, 'r') as f:
            for line in f:
                i, t_d, t_t, apeak, latpeak, lonpeak, xpeak, ypeak, \
                    zpeak = line.split()
                # Apply the local N/E offset to the reference lat/lon.
                lat, lon = orthodrome.ne_to_latlon(
                    float(latpeak), float(lonpeak),
                    float(xpeak), float(ypeak))
                t = util.str_to_time("%s %s" % (t_d, t_t))
                label = "%s-%s" % (apeak, i)
                e = model.Event(
                    lat=lat, lon=lon, depth=float(zpeak),
                    name=label, time=t)
                m = gui_util.EventMarker(e, kind=int(kind_default[0]))
                markers.append(m)
    return markers
def export_geojson(self, filename):
    """Export the quadtree leaves as a GeoJSON FeatureCollection of
    polygons carrying per-leaf statistics.

    :param filename: output file path.

    Fix: cast the leaf statistics to plain ``float`` -- numpy scalar
    types are not JSON-serializable (consistent with the other
    export_geojson variant in this codebase).
    """
    import geojson
    self._log.debug('Exporting GeoJSON Quadtree to %s', filename)
    features = []

    for lf in self.leaves:
        llN, llE, urN, urE = (lf.llN, lf.llE, lf.urN, lf.urE)
        if self.frame.isDegree():
            # Leaf offsets are degrees relative to the frame origin.
            llN += self.frame.llLat
            llE += self.frame.llLon
            urN += self.frame.llLat
            urE += self.frame.llLon

        # Closed ring: first vertex repeated at the end.
        coords = num.array([(llN, llE), (llN, urE), (urN, urE),
                            (urN, llE), (llN, llE)])

        if self.frame.isMeter():
            coords = od.ne_to_latlon(self.frame.llLat, self.frame.llLon,
                                     *coords.T)
            coords = num.array(coords).T

        # GeoJSON expects (lon, lat) ordering.
        coords = coords[:, [1, 0]].tolist()
        feature = geojson.Feature(
            geometry=geojson.Polygon(coordinates=[coords]),
            id=lf.id,
            properties={
                'mean': float(lf.mean),
                'median': float(lf.median),
                'std': float(lf.std),
                'var': float(lf.var)
            })
        features.append(feature)

    collection = geojson.FeatureCollection(features)
    with open(filename, 'w') as f:
        geojson.dump(collection, f)
def testProjections(self):
    """E/N data synthesized from unit radial and transverse motion
    must project back to unit R and T amplitudes."""
    km = 1000.
    ev = model.Event(lat=-10, lon=150., depth=0.0)
    for azi in num.linspace(0., 360., 37):
        lat, lon = orthodrome.ne_to_latlon(ev.lat, ev.lon,
                                           10. * km * math.cos(azi),
                                           10. * km * math.sin(azi))
        sta = model.Station(lat=lat, lon=lon)
        sta.set_event_relative_data(ev)
        sta.set_channels_by_name('BHZ', 'BHE', 'BHN')
        r = 1.
        t = 1.
        # Rotate unit (R, T) into (E, N) for this azimuth.
        traces = [
            trace.Trace(channel='BHE', ydata=num.array(
                [math.sin(azi) * r + math.cos(azi) * t])),
            trace.Trace(channel='BHN', ydata=num.array(
                [math.cos(azi) * r - math.sin(azi) * t])),
        ]
        for m, in_channels, out_channels in sta.guess_projections_to_rtu():
            projected = trace.project(traces, m, in_channels, out_channels)

        def g(traces, cha):
            # Return the first trace matching the channel name.
            for tr in traces:
                if tr.channel == cha:
                    return tr

        r = g(projected, 'R')
        t = g(projected, 'T')
        assert (near(r.ydata[0], 1.0, 0.001))
        assert (near(t.ydata[0], 1.0, 0.001))
def get_latlon(self, i):
    """Draw a random (lat, lon), optionally within a radius around a
    center point and optionally restricted to land.

    :param i: index used to seed the random state.
    :returns: (lat, lon) tuple.
    :raises ScenarioError: when no acceptable location is found within
        ``self.ntries`` attempts.

    Fix: ``sadd`` was only bound inside ``if self.avoid_water``, so the
    final ``raise`` could hit an unbound local; it is now always
    defined.
    """
    rstate = self.get_rstate(i)
    for _ in range(self.ntries):
        radius = self.get_radius()
        if radius is None:
            # Unconstrained: uniform over the globe.
            lat = random_lat(rstate)
            lon = rstate.uniform(-180., 180.)
        else:
            lat_center, lon_center = self.get_center_latlon()
            # Rejection-sample a point uniformly inside the circle.
            while True:
                north = rstate.uniform(-radius, radius)
                east = rstate.uniform(-radius, radius)
                if math.sqrt(north**2 + east**2) <= radius:
                    break
            lat, lon = od.ne_to_latlon(lat_center, lon_center, north, east)

        if not self.avoid_water or is_on_land(lat, lon):
            return lat, lon

    sadd = ' (avoiding water)' if self.avoid_water else ''
    raise ScenarioError('could not generate location%s' % sadd)
def testGridDistances(self):
    """Great-circle distances of projected grid nodes back to the
    origin must match their planar N/E distances."""
    for i in range(100):
        w, h = 20, 15            # unused here
        km = 1000.               # unused here
        # Random grid extent spanning roughly 1e4 to 2e7 m.
        gsize = random.uniform(0., 1.) * 2. * 10.**random.uniform(4., 7.)
        north_grid, east_grid = num.meshgrid(
            num.linspace(-gsize / 2., gsize / 2., 11),
            num.linspace(-gsize / 2., gsize / 2., 11))

        north_grid = north_grid.flatten()
        east_grid = east_grid.flatten()

        lat_delta = gsize / config.earthradius * r2d * 2.   # unused
        lon = random.uniform(-180., 180.)
        lat = random.uniform(-90., 90.)

        lat_grid, lon_grid = orthodrome.ne_to_latlon(
            lat, lon, north_grid, east_grid)
        lat_grid_alt, lon_grid_alt = \
            orthodrome.ne_to_latlon_alternative_method(
                lat, lon, north_grid, east_grid)

        for la, lo, no, ea in zip(lat_grid, lon_grid,
                                  north_grid, east_grid):
            a = Loc()
            a.lat = la
            a.lon = lo
            b = Loc()
            b.lat = lat
            b.lon = lon

            cd = orthodrome.cosdelta(a, b)
            assert cd <= 1.0
            d = num.arccos(cd) * config.earthradius
            d2 = math.sqrt(no**2 + ea**2)
            # Tolerate 1 mm mismatch; ignore sub-meter distances.
            assert not (abs(d - d2) > 1.0e-3 and d2 > 1.)
def corners(lon, lat, w, h):
    """Lower-left and upper-right corners of a ``w`` x ``h`` (meters)
    box centered at (lat, lon).

    Returns (ll_lon, ll_lat, ur_lon, ur_lat).
    """
    half_w = 0.5 * w
    half_h = 0.5 * h
    ll_lat, ll_lon = od.ne_to_latlon(lat, lon, -half_h, -half_w)
    ur_lat, ur_lon = od.ne_to_latlon(lat, lon, half_h, half_w)
    return ll_lon, ll_lat, ur_lon, ur_lat
def test_fomosto_vs_psgrn_pscmp(self):
    """Static displacements from the fomosto GF store must agree with
    a direct psgrn/pscmp run for the same rectangular source."""
    store_dir, c = self.get_pscmp_store_info()

    origin = gf.Location(lat=10., lon=-15.)

    # Randomized rectangular source shared by both pipelines.
    TestRF = dict(
        lat=origin.lat, lon=origin.lon,
        depth=2. * km, width=0.2 * km, length=0.5 * km,
        rake=90., dip=45., strike=45.,
        slip=num.random.uniform(1., 5.))

    source_plain = gf.RectangularSource(**TestRF)
    source_with_time = gf.RectangularSource(time=123.5, **TestRF)

    neast = 40
    nnorth = 40

    # Observation grid of N/E offsets around the origin.
    N, E = num.meshgrid(num.linspace(-20. * km, 20. * km, nnorth),
                        num.linspace(-20. * km, 20. * km, neast))

    # Direct pscmp run on geographic scatter points.
    lats, lons = ortd.ne_to_latlon(origin.lat, origin.lon,
                                   N.flatten(), E.flatten())
    pscmp_sources = [psgrn_pscmp.PsCmpRectangularSource(**TestRF)]

    cc = c.pscmp_config
    cc.observation = psgrn_pscmp.PsCmpScatter(lats=lats, lons=lons)
    cc.rectangular_source_patches = pscmp_sources

    ccf = psgrn_pscmp.PsCmpConfigFull(**cc.items())
    ccf.psgrn_outdir = os.path.join(store_dir, c.gf_outdir) + '/'

    t2 = time()
    runner = psgrn_pscmp.PsCmpRunner(keep_tmp=False)
    runner.run(ccf)
    ps2du = runner.get_results(component='displ')[0]
    t3 = time()
    logger.info('pscmp stacking time %f' % (t3 - t2))

    un_pscmp = ps2du[:, 0]
    ue_pscmp = ps2du[:, 1]
    ud_pscmp = ps2du[:, 2]

    # Engine targets for both interpolation modes.
    starget_nn = gf.StaticTarget(
        lats=num.full(N.size, origin.lat),
        lons=num.full(N.size, origin.lon),
        north_shifts=N.flatten(),
        east_shifts=E.flatten(),
        interpolation='nearest_neighbor')

    starget_ml = gf.StaticTarget(
        lats=num.full(N.size, origin.lat),
        lons=num.full(N.size, origin.lon),
        north_shifts=N.flatten(),
        east_shifts=E.flatten(),
        interpolation='multilinear')

    engine = gf.LocalEngine(store_dirs=[store_dir])

    for source in [source_plain, source_with_time]:
        t0 = time()
        r = engine.process(source, [starget_nn, starget_ml])
        t1 = time()
        logger.info('pyrocko stacking time %f' % (t1 - t0))
        for static_result in r.static_results():
            un_fomosto = static_result.result['displacement.n']
            ue_fomosto = static_result.result['displacement.e']
            ud_fomosto = static_result.result['displacement.d']
            # Agreement within 1 mm per component.
            num.testing.assert_allclose(un_fomosto, un_pscmp, atol=1 * mm)
            num.testing.assert_allclose(ue_fomosto, ue_pscmp, atol=1 * mm)
            num.testing.assert_allclose(ud_fomosto, ud_pscmp, atol=1 * mm)
def fomosto_vs_psgrn_pscmp(self, pscmp_sources, gf_sources, atol=2 * mm):
    """Compare fomosto-engine static displacements against a direct
    psgrn/pscmp run for the given source configurations.

    :param pscmp_sources: source patches for the direct pscmp run.
    :param gf_sources: equivalent sources processed via the GF engine.
    :param atol: absolute per-component tolerance for assert_allclose.
    """

    def plot_components_compare(fomosto_comps, psgrn_comps):
        # Debug helper: fomosto vs. pscmp displacement fields, their
        # absolute and relative differences, per component N/E/D.
        import matplotlib.pyplot as plt
        fig, axes = plt.subplots(4, 3)
        for i, (fcomp, pscomp, cname) in enumerate(
                zip(fomosto_comps, psgrn_comps, ['N', 'E', 'D'])):
            # NOTE(review): nnorth/neast are not defined in this
            # function -- presumably module-level constants; confirm.
            fdispl = fcomp.reshape(nnorth, neast)
            pdispl = pscomp.reshape(nnorth, neast)
            pcbound = num.max([num.abs(pdispl.min()), pdispl.max()])
            # fcbound = num.max([num.abs(fdispl.min()), fdispl.max()])
            axes[0, i].imshow(pdispl, cmap='seismic',
                              vmin=-pcbound, vmax=pcbound)
            axes[1, i].imshow(fdispl, cmap='seismic',
                              vmin=-pcbound, vmax=pcbound)
            diff = pdispl - fdispl
            rdiff = pdispl / fdispl
            axes[2, i].imshow(diff, cmap='seismic')
            axes[3, i].imshow(rdiff, cmap='seismic')
            axes[0, i].set_title('PSCMP %s' % cname)
            axes[1, i].set_title('Fomosto %s' % cname)
            axes[2, i].set_title(
                'abs diff min max %f, %f' % (diff.min(), diff.max()))
            axes[3, i].set_title(
                'rel diff min max %f, %f' % (rdiff.min(), rdiff.max()))
        plt.show()

    store_dir, c = self.get_pscmp_store_info()
    origin = gf.Location(lat=10., lon=-15.)
    # Observation grid of N/E offsets around the origin.
    N, E = num.meshgrid(num.linspace(-20. * km, 20. * km, nnorth),
                        num.linspace(-20. * km, 20. * km, neast))

    # Direct pscmp run on geographic scatter points.
    lats, lons = ortd.ne_to_latlon(origin.lat, origin.lon,
                                   N.flatten(), E.flatten())
    cc = c.pscmp_config
    cc.observation = psgrn_pscmp.PsCmpScatter(lats=lats, lons=lons)
    cc.rectangular_source_patches = pscmp_sources
    cc.snapshots = psgrn_pscmp.PsCmpSnapshots(
        tmin=0., tmax=1., deltatdays=1.)
    ccf = psgrn_pscmp.PsCmpConfigFull(**cc.items())
    ccf.psgrn_outdir = os.path.join(store_dir, c.gf_outdir) + '/'

    t2 = time()
    runner = psgrn_pscmp.PsCmpRunner(keep_tmp=False)
    runner.run(ccf)
    ps2du = runner.get_results(component='displ')[0]
    logger.info('pscmp stacking time %f s' % (time() - t2))

    un_pscmp = ps2du[:, 0]
    ue_pscmp = ps2du[:, 1]
    ud_pscmp = ps2du[:, 2]

    # Engine targets for both interpolation modes.
    starget_nn = gf.StaticTarget(
        lats=num.full(N.size, origin.lat),
        lons=num.full(N.size, origin.lon),
        north_shifts=N.flatten(),
        east_shifts=E.flatten(),
        interpolation='nearest_neighbor')
    starget_ml = gf.StaticTarget(
        lats=num.full(N.size, origin.lat),
        lons=num.full(N.size, origin.lon),
        north_shifts=N.flatten(),
        east_shifts=E.flatten(),
        interpolation='multilinear')

    engine = gf.LocalEngine(store_dirs=[store_dir])

    for source in gf_sources:
        t0 = time()
        r = engine.process(source, [starget_nn, starget_ml])
        logger.info('pyrocko stacking time %f' % (time() - t0))
        for i, static_result in enumerate(r.static_results()):
            un_fomosto = static_result.result['displacement.n']
            ue_fomosto = static_result.result['displacement.e']
            ud_fomosto = static_result.result['displacement.d']
            if show_plot:
                # show_plot presumably a module-level flag; confirm.
                fomosto_comps = [un_fomosto, ue_fomosto, ud_fomosto]
                psgrn_comps = [un_pscmp, ue_pscmp, ud_pscmp]
                plot_components_compare(fomosto_comps, psgrn_comps)

            num.testing.assert_allclose(un_fomosto, un_pscmp, atol=atol)
            num.testing.assert_allclose(ue_fomosto, ue_pscmp, atol=atol)
            num.testing.assert_allclose(ud_fomosto, ud_pscmp, atol=atol)
def call(self): self.cleanup() viewer = self.get_viewer() master = viewer.get_active_event() if master is None: self.fail('no master event selected') stations = list(viewer.stations.values()) stations.sort(key=lambda s: (s.network,s.station)) if not stations: self.fail('no station information available') # gather events to be processed events = [] for m in viewer.markers: if isinstance(m, EventMarker): if m.kind == 0: events.append( m.get_event() ) events.sort(key=lambda ev: ev.time) event_to_number = {} for iev, ev in enumerate(events): event_to_number[ev] = iev if self.model_select.startswith('Global'): model_key = 'global' else: model_key = master.lat, master.lon if model_key != self.model_key: if self.model_select.startswith('Global'): self.model = cake.load_model() else: latlon = master.lat, master.lon profile = crust2x2.get_profile(*latlon) profile.set_layer_thickness(crust2x2.LWATER, 0.0) self.model = cake.LayeredModel.from_scanlines( cake.from_crust2x2_profile(profile)) self.model_key = model_key phases = { 'P': ([ cake.PhaseDef(x) for x in 'P p'.split() ], 'Z'), 'S': ([ cake.PhaseDef(x) for x in 'S s'.split() ], 'NE'), } phasenames = phases.keys() phasenames.sort() # synthetic arrivals and ray geometry for master event master_depth = master.depth if self.master_depth_km is not None: master_depth = self.master_depth_km * km tt = {} g = {} for iphase, phasename in enumerate(phasenames): for istation, station in enumerate(stations): dist = orthodrome.distance_accurate50m(master, station) azi = orthodrome.azimuth(master, station) arrivals = self.model.arrivals( phases=phases[phasename][0], distances=[ dist*cake.m2d ], zstart = master_depth, zstop = 0.0) if arrivals: first = arrivals[0] tt[station.network, station.station, phasename] = first.t takeoff = first.takeoff_angle() u = first.path.first_straight().u_in(first.endgaps) g[iphase, istation] = num.array([ math.cos(azi*d2r) * math.sin(takeoff*d2r) * u, math.sin(azi*d2r) * math.sin(takeoff*d2r) * u, 
math.cos(takeoff*d2r) * u ]) # gather picks for each event for ev in events: picks = {} for m2 in viewer.markers: if isinstance(m2, PhaseMarker) and m2.kind == 0: if m2.get_event() == ev: net, sta, _, _ = m2.one_nslc() picks[net,sta,m2.get_phasename()] = (m2.tmax + m2.tmin) / 2.0 ev.picks = picks # time corrections for extraction windows dataobs = [] datasyn = [] for phasename in phasenames: for station in stations: nsp = station.network, station.station, phasename datasyn.append(tt.get(nsp,None)) for ev in events: if nsp in ev.picks: ttobs = ev.picks[nsp] - ev.time else: ttobs = None dataobs.append(ttobs) ttsyn = num.array(datasyn, dtype=num.float).reshape(( len(phasenames), len(stations))) ttobs = num.array(dataobs, dtype=num.float).reshape(( len(phasenames), len(stations), len(events))) ttres = ttobs - ttsyn[:,:,num.newaxis] tt_corr_event = num.nansum( ttres, axis=1) / \ num.nansum( num.isfinite(ttres), axis=1 ) tt_corr_event = num.where(num.isfinite(tt_corr_event), tt_corr_event, 0.) ttres -= tt_corr_event[:,num.newaxis,:] tt_corr_station = num.nansum( ttres, axis=2) / \ num.nansum( num.isfinite(ttres), axis=2 ) tt_corr_station = num.where(num.isfinite(tt_corr_station), tt_corr_station, 0.) 
ttres -= tt_corr_station[:,:, num.newaxis] tevents_raw = num.array( [ ev.time for ev in events ] ) tevents_corr = tevents_raw + num.mean(tt_corr_event, axis=0) # print timing information print 'timing stats' for iphasename, phasename in enumerate(phasenames): data = [] for ev in events: iev = event_to_number[ev] for istation, station in enumerate(stations): nsp = station.network, station.station, phasename if nsp in tt and nsp in ev.picks: tarr = ev.time + tt[nsp] tarr_ec = tarr + tt_corr_event[iphasename, iev] tarr_ec_sc = tarr_ec + tt_corr_station[iphasename, istation] tobs = ev.picks[nsp] data.append((tobs-tarr, tobs-tarr_ec, tobs-tarr_ec_sc)) if data: data = num.array(data, dtype=num.float).T print 'event %10s %3s %3i %15.2g %15.2g %15.2g' % ( (ev.name, phasename, data.shape[1]) + tuple( num.mean(num.abs(x)) for x in data )) else: print 'event %10s %3s no picks' % (ev.name, phasename) # extract and preprocess waveforms tpad = 0.0 for f in self.corner_highpass, self.corner_lowpass: if f is not None: tpad = max(tpad, 1.0/f) pile = self.get_pile() waveforms = {} for ev in events: iev = event_to_number[ev] markers = [] for iphasename, phasename in enumerate(phasenames): for istation, station in enumerate(stations): nsp = station.network, station.station, phasename if nsp in tt: tarr = ev.time + tt[nsp] nslcs = [ ( station.network, station.station, '*', '*' ) ] marker = PhaseMarker( nslcs, tarr, tarr, 1, event=ev, phasename=phasename) markers.append(marker) tarr2 = tarr + tt_corr_station[iphasename, istation] + \ tt_corr_event[iphasename, iev] marker = PhaseMarker( nslcs, tarr2, tarr2, 2, event=ev, phasename=phasename) markers.append(marker) tmin = tarr2+self.tstart tmax = tarr2+self.tend marker = PhaseMarker( nslcs, tmin, tmax, 3, event=ev, phasename=phasename) markers.append(marker) trs = pile.all(tmin, tmax, tpad=tpad, trace_selector= lambda tr: tr.nslc_id[:2] == nsp[:2], want_incomplete=False) trok = [] for tr in trs: if num.all(tr.ydata[0] == tr.ydata): 
continue if self.corner_highpass: tr.highpass(4, self.corner_highpass) if self.corner_lowpass: tr.lowpass(4, self.corner_lowpass) tr.chop(tmin, tmax) tr.set_location(ev.name) #tr.shift( - (tmin - master.time) ) if num.all(num.isfinite(tr.ydata)): trok.append(tr) waveforms[nsp+(iev,)] = trok self.add_markers(markers) def get_channel(trs, cha): for tr in trs: if tr.channel == cha: return tr return None nevents = len(events) nstations = len(stations) nphases = len(phasenames) # correlate waveforms coefs = num.zeros((nphases, nstations, nevents, nevents)) coefs.fill(num.nan) tshifts = coefs.copy() tshifts_picked = coefs.copy() for iphase, phasename in enumerate(phasenames): for istation, station in enumerate(stations): nsp = station.network, station.station, phasename for a in events: ia = event_to_number[a] for b in events: ib = event_to_number[b] if ia == ib: continue if nsp in a.picks and nsp in b.picks: tshifts_picked[iphase,istation,ia,ib] = \ b.picks[nsp] - a.picks[nsp] wa = waveforms[nsp+(ia,)] wb = waveforms[nsp+(ib,)] channels = list(set([ tr.channel for tr in wa + wb ])) channels.sort() tccs = [] for cha in channels: if cha[-1] not in phases[phasename][1]: continue ta = get_channel(wa, cha) tb = get_channel(wb, cha) if ta is None or tb is None: continue tcc = trace.correlate(ta,tb, mode='full', normalization='normal', use_fft=True) tccs.append(tcc) if not tccs: continue tc = None for tcc in tccs: if tc is None: tc = tcc else: tc.add(tcc) tc.ydata *= 1./len(tccs) tmid = tc.tmin*0.5 + tc.tmax*0.5 tlen = (tc.tmax - tc.tmin)*0.5 tc_cut = tc.chop(tmid-tlen*0.5, tmid+tlen*0.5, inplace=False) tshift, coef = tc_cut.max() if (tshift < tc.tmin + 0.5*tc.deltat or tc.tmax - 0.5*tc.deltat < tshift): continue coefs[iphase,istation,ia,ib] = coef tshifts[iphase,istation,ia,ib] = tshift if self.show_correlation_traces: tc.shift(master.time - (tc.tmax + tc.tmin)/2.) 
self.add_trace(tc) #tshifts = tshifts_picked coefssum_sta = num.nansum(coefs, axis=2) / num.sum(num.isfinite(coefs), axis=2) csum_sta = num.nansum(coefssum_sta, axis=2) / num.sum(num.isfinite(coefssum_sta), axis=2) for iphase, phasename in enumerate(phasenames): for istation, station in enumerate(stations): print 'station %-5s %s %15.2g' % (station.station, phasename, csum_sta[iphase,istation]) coefssum = num.nansum(coefs, axis=1) / num.sum(num.isfinite(coefs), axis=1) csumevent = num.nansum(coefssum, axis=2) / num.sum(num.isfinite(coefssum), axis=2) above = num.where(num.isfinite(coefs), coefs >= self.min_corr, 0) csumabove = num.sum(num.sum(above, axis=1), axis=2) coefssum = num.ma.masked_invalid(coefssum) print 'correlation stats' for iphase, phasename in enumerate(phasenames): for ievent, event in enumerate(events): print 'event %10s %3s %8i %15.2g' % ( event.name, phasename, csumabove[iphase,ievent], csumevent[iphase,ievent]) # plot event correlation matrix fframe = self.figure_frame() fig = fframe.gcf() for iphase, phasename in enumerate(phasenames): p = fig.add_subplot(1,nphases,iphase+1) p.set_xlabel('Event number') p.set_ylabel('Event number') mesh = p.pcolormesh(coefssum[iphase]) cb = fig.colorbar(mesh, ax=p) cb.set_label('Max correlation coefficient') if self.save: fig.savefig(self.output_filename(dir='correlation.pdf')) fig.canvas.draw() # setup and solve linear system data = [] rows = [] weights = [] for iphase in xrange(nphases): for istation in xrange(nstations): for ia in xrange(nevents): for ib in xrange(ia+1,nevents): k = iphase, istation, ia, ib w = coefs[k] if not num.isfinite(tshifts[k]) \ or not num.isfinite(w) or w < self.min_corr: continue row = num.zeros(nevents*4) row[ia*4:ia*4+3] = g[iphase,istation] row[ia*4+3] = -1.0 row[ib*4:ib*4+3] = -g[iphase,istation] row[ib*4+3] = 1.0 weights.append(w) rows.append(row) data.append(tshifts[iphase,istation,ia,ib]) nsamp = len(data) for i in range(4): row = num.zeros(nevents*4) row[i::4] = 1. 
rows.append(row) data.append(0.0) if self.fix_depth: for ievent in range(nevents): row = num.zeros(nevents*4) row[ievent*4+2] = 1.0 rows.append(row) data.append(0.0) a = num.array(rows, dtype=num.float) d = num.array(data, dtype=num.float) w = num.array(weights, dtype=num.float) if self.weighting == 'equal': w[:nsamp] = 1.0 elif self.weighting == 'linear': pass elif self.weighting == 'quadratic': w[:nsamp] = w[:nsamp]**2 a[:nsamp,:] *= w[:,num.newaxis] d[:nsamp] *= w[:nsamp] x, residuals, rank, singular = num.linalg.lstsq(a,d) x0 = num.zeros(nevents*4) x0[3::4] = tevents_corr mean_abs_residual0 = num.mean( num.abs((num.dot(a[:nsamp], x0) - d[:nsamp])/w[:nsamp])) mean_abs_residual = num.mean( num.abs((num.dot(a[:nsamp],x) - d[:nsamp])/w[:nsamp])) print mean_abs_residual0, mean_abs_residual # distorted solutions npermutations = 100 noiseamount = mean_abs_residual xdistorteds = [] for i in range(npermutations): dnoisy = d.copy() dnoisy[:nsamp] += num.random.normal(size=nsamp)*noiseamount*w[:nsamp] xdistorted, residuals, rank, singular = num.linalg.lstsq(a,dnoisy) xdistorteds.append(xdistorted) mean_abs_residual = num.mean(num.abs(num.dot(a,xdistorted)[:nsamp] - dnoisy[:nsamp])) tmean = num.mean([ e.time for e in events ]) north = x[0::4] east = x[1::4] down = x[2::4] etime = x[3::4] + tmean def plot_range(x): mi, ma = num.percentile(x, [10., 90.]) ext = (ma-mi)/5. 
mi -= ext ma += ext return mi, ma lat, lon = orthodrome.ne_to_latlon(master.lat, master.lon, north, east) events_out = [] for ievent, event in enumerate(events): event_out = model.Event(time=etime[ievent], lat=lat[ievent], lon=lon[ievent], depth=down[ievent] + master_depth, name = event.name) mark = EventMarker(event_out, kind=4) self.add_marker(mark) events_out.append(event_out) model.Event.dump_catalog(events_out, 'events.relocated.txt') # plot results ned_orig = [] for event in events: n, e = orthodrome.latlon_to_ne(master, event) d = event.depth ned_orig.append((n,e,d)) ned_orig = num.array(ned_orig) ned_orig[:,0] -= num.mean(ned_orig[:,0]) ned_orig[:,1] -= num.mean(ned_orig[:,1]) ned_orig[:,2] -= num.mean(ned_orig[:,2]) north0, east0, down0 = ned_orig.T north2, east2, down2, time2 = num.hstack(xdistorteds).reshape((-1,4)).T fframe = self.figure_frame() fig = fframe.gcf() color_sym = (0.1,0.1,0.0) color_scat = (0.3,0.5,1.0,0.2) d = u'\u0394 ' if not self.fix_depth: p = fig.add_subplot(2,2,1, aspect=1.0) else: p = fig.add_subplot(1,1,1, aspect=1.0) mi_north, ma_north = plot_range(north) mi_east, ma_east = plot_range(east) mi_down, ma_down = plot_range(down) p.set_xlabel(d+'East [km]') p.set_ylabel(d+'North [km]') p.plot(east2/km, north2/km, '.', color=color_scat, markersize=2) p.plot(east/km, north/km, '+', color=color_sym) p.plot(east0/km, north0/km, 'x', color=color_sym) p0 = p for i,ev in enumerate(events): p.text(east[i]/km, north[i]/km, ev.name, clip_on=True) if not self.fix_depth: p = fig.add_subplot(2,2,2, sharey=p0, aspect=1.0) p.set_xlabel(d+'Depth [km]') p.set_ylabel(d+'North [km]') p.plot(down2/km, north2/km, '.', color=color_scat, markersize=2) p.plot(down/km, north/km, '+', color=color_sym) for i,ev in enumerate(events): p.text(down[i]/km, north[i]/km, ev.name, clip_on=True) p1 = p p = fig.add_subplot(2,2,3, sharex=p0, aspect=1.0) p.set_xlabel(d+'East [km]') p.set_ylabel(d+'Depth [km]') p.plot(east2/km, down2/km, '.', color=color_scat, markersize=2) 
p.plot(east/km, down/km, '+', color=color_sym) for i,ev in enumerate(events): p.text(east[i]/km, down[i]/km, ev.name, clip_on=True) p.invert_yaxis() p2 = p p0.set_xlim(mi_east/km, ma_east/km) p0.set_ylim(mi_north/km, ma_north/km) if not self.fix_depth: p1.set_xlim(mi_down/km, ma_down/km) p2.set_ylim(mi_down/km, ma_down/km) if self.save: fig.savefig(self.output_filename(dir='locations.pdf')) fig.canvas.draw()
def plot_erroneous_ne_to_latlon():
    """Fuzz-test ``orthodrome.ne_to_latlon`` against the alternative method.

    Runs forever: draws random grid sizes, origins, converts a regular
    north/east grid to lat/lon with both implementations and, whenever the
    two disagree by more than ``eps`` degrees, renders both point sets with
    GMT and opens the PDF in xpdf for visual inspection.

    Side effects: writes 'orthodrome.pdf' and spawns an xpdf subprocess.
    Never returns; interrupt manually.
    """
    import gmtpy
    import random
    import subprocess
    import time

    while True:
        w, h = 20, 15

        # random square grid extent, spanning roughly 10 km .. 40000 km
        gsize = random.uniform(0., 1.) * 4. * 10.**random.uniform(4., 7.)
        north_grid, east_grid = num.meshgrid(
            num.linspace(-gsize / 2., gsize / 2., 11),
            num.linspace(-gsize / 2., gsize / 2., 11))

        north_grid = north_grid.flatten()
        east_grid = east_grid.flatten()

        # angular extent of the grid, doubled for plotting margin
        # (earthradius, r2d are presumably module-level constants — confirm)
        lat_delta = gsize / earthradius * r2d * 2.
        lon = random.uniform(-180., 180.)
        lat = random.uniform(-90., 90.)

        print(gsize / 1000.)

        # convert the same grid with both implementations
        lat_grid, lon_grid = orthodrome.ne_to_latlon(lat, lon, north_grid, east_grid)
        lat_grid_alt, lon_grid_alt = \
            orthodrome.ne_to_latlon_alternative_method(
                lat, lon, north_grid, east_grid)

        maxerrlat = num.max(num.abs(lat_grid - lat_grid_alt))
        maxerrlon = num.max(num.abs(lon_grid - lon_grid_alt))
        eps = 1.0e-8  # tolerated disagreement [deg]
        if maxerrlon > eps or maxerrlat > eps:
            print(lat, lon, maxerrlat, maxerrlon)

            gmt = gmtpy.GMT(
                config={
                    'PLOT_DEGREE_FORMAT': 'ddd.xxxF',
                    'PAPER_MEDIA': 'Custom_%ix%i' % (w * gmtpy.cm,
                                                     h * gmtpy.cm),
                    'GRID_PEN_PRIMARY': 'thinnest/0/50/0'
                })

            # clamp map frame to +-85 deg to keep the projection sane
            south = max(-85., lat - 0.5 * lat_delta)
            north = min(85., lat + 0.5 * lat_delta)

            lon_delta = lat_delta / math.cos(lat * d2r)

            # scale bar length, rounded to a "nice" value in km
            delta = lat_delta / 360. * earthradius * 2. * math.pi
            scale_km = gmtpy.nice_value(delta / 10.) / 1000.

            west = lon - 0.5 * lon_delta
            east = lon + 0.5 * lon_delta

            x, y = (west, east), (south, north)
            xax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
            yax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
            scaler = gmtpy.ScaleGuru(data_tuples=[(x, y)], axes=(xax, yax))
            scaler['R'] = '-Rg'

            layout = gmt.default_layout()
            mw = 2.5 * gmtpy.cm
            layout.set_fixed_margins(mw, mw, mw / gmtpy.golden_ratio,
                                     mw / gmtpy.golden_ratio)
            widget = layout.get_widget()
            # widget['J'] = ('-JT%g/%g' % (lon, lat)) + '/%(width)gp'
            # azimuthal equidistant projection centered on the test origin
            widget['J'] = (
                '-JE%g/%g/%g' % (lon, lat, min(lat_delta/2., 180.)))\
                + '/%(width)gp'
            aspect = gmtpy.aspect_for_projection(*(widget.J() + scaler.R()))
            widget.set_aspect(aspect)

            gmt.psbasemap(B='5g5',
                          L=('x%gp/%gp/%g/%g/%gk' % (widget.width() / 2.,
                                                     widget.height() / 7.,
                                                     lon, lat, scale_km)),
                          *(widget.JXY() + scaler.R()))

            # red crosses: ne_to_latlon; blue circles: alternative method
            gmt.psxy(in_columns=(lon_grid, lat_grid), S='x10p', W='1p/200/0/0',
                     *(widget.JXY() + scaler.R()))
            gmt.psxy(in_columns=(lon_grid_alt, lat_grid_alt), S='c10p',
                     W='1p/0/0/200', *(widget.JXY() + scaler.R()))

            gmt.save('orthodrome.pdf')
            subprocess.call(['xpdf', '-remote', 'ortho', '-reload'])
            time.sleep(2)
        else:
            print('ok', gsize, lat, lon)
def get_ll_anchor(self): return od.ne_to_latlon(self.lat_center, self.lon_center, -self.track_length / 2, -self.width / 2)
sab_los = d['LOS'][0:n_pix, :] los = num.zeros_like(sab_los) los[:, 0] = sab_los[:, 1] los[:, 1] = sab_los[:, 0] los[:, 2] = sab_los[:, 2] ## init fault geometry n_sources = source_params.shape[1] sources = [] for sps in range(n_sources): Length, Width, Depth, Dip, Strike, Xloc, Yloc, strsl, dipsl, _ = source_params[:, sps] print(Xloc, Yloc) lat, lon = otd.ne_to_latlon(event.lat, event.lon, (Yloc - y_shift) * km, (Xloc - x_shift) * km) rake = math.atan2(dipsl, strsl) print('d,s,r', dipsl, strsl, rake) slip = math.sqrt(strsl**2 + dipsl**2) print('lat,lon', lat, lon) rf = RectangularSource( lat=lat, lon=lon, east_shift=0., north_shift=0., depth=Depth * km, length=Length * km, width=Width * km, dip=Dip + 180., # no negative dip! strike=Strike,
    def test_against_kiwi(self):
        """Benchmark/compare pyrocko GF engine against the kiwi seismosizer.

        Skips silently when the 'chile_70km_crust' store is not available.
        Python 2 code (print statements, xrange). Requires the external
        'tunguska' package and a running seismosizer; interactive
        (opens a snuffler window via trace.snuffle).
        """
        engine = gf.get_engine()
        store_id = 'chile_70km_crust'
        try:
            store = engine.get_store(store_id)
        except gf.NoSuchStore:
            logger.warn('GF Store %s not available - skipping test' % store_id)
            return

        # reference source; width=0 makes it line-like along strike
        base_source = gf.RectangularSource(
            depth=15*km,
            strike=0.,
            dip=90.,
            rake=0.,
            magnitude=4.5,
            nucleation_x=-1.,
            length=10*km,
            width=0*km,
            stf=gf.BoxcarSTF(duration=1.0))

        base_event = base_source.pyrocko_event()

        channels = 'NEZ'
        nstations = 10
        stations = []
        targets = []
        # random receiver ring, 40-900 km epicentral distance around (0, 0)
        for istation in xrange(nstations):
            dist = rand(40.*km, 900*km)
            azi = rand(-180., 180.)
            north_shift = dist * math.cos(azi*d2r)
            east_shift = dist * math.sin(azi*d2r)
            lat, lon = od.ne_to_latlon(0., 0., north_shift, east_shift)
            sta = 'S%02i' % istation
            station = model.Station(
                '', sta, '',
                lat=lat, lon=lon)
            station.set_channels_by_name('N', 'E', 'Z')
            stations.append(station)
            for cha in channels:
                target = gf.Target(
                    codes=station.nsl() + (cha,),
                    lat=lat,
                    lon=lon,
                    quantity='displacement',
                    interpolation='multilinear',
                    optimization='enable',
                    store_id=store_id)
                targets.append(target)

        from tunguska import glue

        nsources = 10

        # nprocs_max = multiprocessing.cpu_count()
        nprocs = 1

        try:
            # external kiwi seismosizer working on the same GF database
            seis = glue.start_seismosizer(
                gfdb_path=op.join(store.store_dir, 'db'),
                event=base_event,
                stations=stations,
                hosts=['localhost']*nprocs,
                balance_method='123321',
                effective_dt=0.5,
                verbose=False)

            ksource = to_kiwi_source(base_source)

            seis.set_source(ksource)
            recs = seis.get_receivers_snapshot(('syn',), (), 'plain')

            trs = []
            for rec in recs:
                for tr in rec.get_traces():
                    # map kiwi channel names to pyrocko convention
                    tr.set_codes(channel=transchan[tr.channel])
                    trs.append(tr)

            trs2 = engine.process(base_source, targets).pyrocko_traces()

            # interactive visual comparison of both synthetics
            trace.snuffle(trs + trs2)

            seis.set_synthetic_reference()

            # timing comparison for point-like and finite sources
            for sourcetype in ['point', 'rect']:
                sources = []
                for isource in xrange(nsources):
                    m = pmt.MomentTensor.random_dc()
                    strike, dip, rake = map(float, m.both_strike_dip_rake()[0])

                    if sourcetype == 'point':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            nucleation_x=0.0,
                            nucleation_y=0.0,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0),
                            stf=gf.BoxcarSTF(duration=1.0))
                    elif sourcetype == 'rect':
                        source = gf.RectangularSource(
                            north_shift=rand(-20.*km, 20*km),
                            east_shift=rand(-20.*km, 20*km),
                            depth=rand(10*km, 20*km),
                            length=10*km,
                            width=5*km,
                            nucleation_x=-1.,
                            nucleation_y=0,
                            strike=strike,
                            dip=dip,
                            rake=rake,
                            magnitude=rand(4.0, 5.0),
                            stf=gf.BoxcarSTF(duration=1.0))
                    else:
                        assert False

                    sources.append(source)

                # 'cold' run warms caches; only the 'hot' run is timed
                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    resp = engine.process(sources, targets, nprocs=nprocs)
                    t1 = time.time()
                    if temperature == 'hot':
                        dur_pyrocko = t1 - t0

                    del resp

                ksources = map(to_kiwi_source, sources)
                for temperature in ['cold', 'hot']:
                    t0 = time.time()
                    seis.make_misfits_for_sources(
                        ksources,
                        show_progress=False)
                    t1 = time.time()
                    if temperature == 'hot':
                        dur_kiwi = t1 - t0

                print 'pyrocko %-5s %5.2fs %5.1fx' % (
                    sourcetype, dur_pyrocko, 1.0)
                print 'kiwi %-5s %5.2fs %5.1fx' % (
                    sourcetype, dur_kiwi, dur_pyrocko/dur_kiwi)

        finally:
            # always shut down the external seismosizer process
            seis.close()
            del seis
def get_lr_anchor(self): return od.ne_to_latlon(self.lat_center, self.lon_center, -self.track_length/2, self.width/2)
def plot_erroneous_ne_to_latlon():
    """Endlessly fuzz-compare the two ne_to_latlon implementations.

    Picks random grid sizes and origins, converts a regular north/east grid
    with ``orthodrome.ne_to_latlon`` and the alternative method, and when
    results differ by more than ``eps`` degrees plots both point sets with
    GMT ('orthodrome.pdf') and reloads it in xpdf for inspection.

    Never returns; interrupt manually.
    """
    import gmtpy
    import random
    import subprocess
    import time

    while True:
        w, h = 20, 15

        # random square grid extent, roughly 10 km .. 40000 km
        gsize = random.uniform(0., 1.)*4.*10.**random.uniform(4., 7.)
        north_grid, east_grid = num.meshgrid(
            num.linspace(-gsize/2., gsize/2., 11),
            num.linspace(-gsize/2., gsize/2., 11))

        north_grid = north_grid.flatten()
        east_grid = east_grid.flatten()

        # angular extent for the map frame, doubled for margin
        # (earthradius, r2d presumably module-level constants — confirm)
        lat_delta = gsize/earthradius*r2d*2.
        lon = random.uniform(-180., 180.)
        lat = random.uniform(-90., 90.)

        print(gsize/1000.)

        lat_grid, lon_grid = orthodrome.ne_to_latlon(
            lat, lon, north_grid, east_grid)
        lat_grid_alt, lon_grid_alt = \
            orthodrome.ne_to_latlon_alternative_method(
                lat, lon, north_grid, east_grid)

        maxerrlat = num.max(num.abs(lat_grid-lat_grid_alt))
        maxerrlon = num.max(num.abs(lon_grid-lon_grid_alt))
        eps = 1.0e-8  # tolerated disagreement [deg]
        if maxerrlon > eps or maxerrlat > eps:
            print(lat, lon, maxerrlat, maxerrlon)

            gmt = gmtpy.GMT(
                config={
                    'PLOT_DEGREE_FORMAT': 'ddd.xxxF',
                    'PAPER_MEDIA': 'Custom_%ix%i' % (w*gmtpy.cm, h*gmtpy.cm),
                    'GRID_PEN_PRIMARY': 'thinnest/0/50/0'})

            # clamp frame to +-85 deg to keep the projection sane
            south = max(-85., lat - 0.5*lat_delta)
            north = min(85., lat + 0.5*lat_delta)

            lon_delta = lat_delta/math.cos(lat*d2r)

            # scale bar length, rounded to a "nice" value in km
            delta = lat_delta/360.*earthradius*2.*math.pi
            scale_km = gmtpy.nice_value(delta/10.)/1000.

            west = lon - 0.5*lon_delta
            east = lon + 0.5*lon_delta

            x, y = (west, east), (south, north)
            xax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
            yax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
            scaler = gmtpy.ScaleGuru(data_tuples=[(x, y)], axes=(xax, yax))
            scaler['R'] = '-Rg'

            layout = gmt.default_layout()
            mw = 2.5*gmtpy.cm
            layout.set_fixed_margins(
                mw, mw, mw/gmtpy.golden_ratio, mw/gmtpy.golden_ratio)
            widget = layout.get_widget()
            # widget['J'] = ('-JT%g/%g' % (lon, lat)) + '/%(width)gp'
            # azimuthal equidistant projection centered on the test origin
            widget['J'] = (
                '-JE%g/%g/%g' % (lon, lat, min(lat_delta/2., 180.)))\
                + '/%(width)gp'
            aspect = gmtpy.aspect_for_projection(*(widget.J() + scaler.R()))
            widget.set_aspect(aspect)

            gmt.psbasemap(
                B='5g5',
                L=('x%gp/%gp/%g/%g/%gk' % (
                    widget.width()/2., widget.height()/7.,
                    lon, lat, scale_km)),
                *(widget.JXY()+scaler.R()))

            # red crosses: ne_to_latlon; blue circles: alternative method
            gmt.psxy(
                in_columns=(lon_grid, lat_grid),
                S='x10p', W='1p/200/0/0',
                *(widget.JXY()+scaler.R()))
            gmt.psxy(
                in_columns=(lon_grid_alt, lat_grid_alt),
                S='c10p', W='1p/0/0/200',
                *(widget.JXY()+scaler.R()))

            gmt.save('orthodrome.pdf')
            subprocess.call(['xpdf', '-remote', 'ortho', '-reload'])
            time.sleep(2)
        else:
            print('ok', gsize, lat, lon)
    def test_fomosto_vs_psgrn_pscmp(self):
        """Cross-check static displacements: fomosto GF store vs. pscmp.

        Builds a psgrn/pscmp GF store from a layered crust/mantle model,
        stacks static displacements for a rectangular source on a 40x40
        observation grid via the pyrocko engine, then runs pscmp directly
        on the same geometry and asserts N/E/D displacements agree to
        2 mm. Skips when the psgrn/pscmp binaries are not installed.
        """
        # crust + mantle model; columns: depth vp vs rho qp qs
        mod = cake.LayeredModel.from_scanlines(cake.read_nd_model_str('''
 0. 5.8 3.46 2.6 1264. 600.
 20. 5.8 3.46 2.6 1264. 600.
 20. 6.5 3.85 2.9 1283. 600.
 35. 6.5 3.85 2.9 1283. 600.
mantle
 35. 8.04 4.48 3.58 1449. 600.
 77.5 8.045 4.49 3.5 1445. 600.
 77.5 8.045 4.49 3.5 180.6 75.
 120. 8.05 4.5 3.427 180. 75.
 120. 8.05 4.5 3.427 182.6 76.06
 165. 8.175 4.509 3.371 188.7 76.55
 210. 8.301 4.518 3.324 201. 79.4
 210. 8.3 4.52 3.321 336.9 133.3
 410. 9.03 4.871 3.504 376.5 146.1
 410. 9.36 5.08 3.929 414.1 162.7
 660. 10.2 5.611 3.918 428.5 172.9
 660. 10.79 5.965 4.229 1349. 549.6'''.lstrip()))

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)  # cleaned up by the test case
        store_id = 'psgrn_pscmp_test'
        version = '2008a'

        c = psgrn_pscmp.PsGrnPsCmpConfig()
        c.psgrn_config.sampling_interval = 1.
        c.psgrn_config.version = version
        c.pscmp_config.version = version

        # static store: one sample per day, shallow sources, 40 km range
        config = gf.meta.ConfigTypeA(
            id=store_id,
            ncomponents=10,
            sample_rate=1. / (3600. * 24.),
            receiver_depth=0. * km,
            source_depth_min=0. * km,
            source_depth_max=5. * km,
            source_depth_delta=0.1 * km,
            distance_min=0. * km,
            distance_max=40. * km,
            distance_delta=0.1 * km,
            modelling_code_id='psgrn_pscmp.%s' % version,
            earthmodel_1d=mod,
            tabulated_phases=[])

        config.validate()
        gf.store.Store.create_editables(
            store_dir, config=config, extra={'psgrn_pscmp': c})

        store = gf.store.Store(store_dir, 'r')
        store.close()

        # build store
        try:
            psgrn_pscmp.build(store_dir, nworkers=1)
        except psgrn_pscmp.PsCmpError as e:
            if str(e).find('could not start psgrn/pscmp') != -1:
                logger.warn('psgrn/pscmp not installed; '
                            'skipping test_pyrocko_gf_vs_pscmp')
                return
            else:
                raise

        origin = gf.Location(
            lat=10.,
            lon=-15.)

        # test GF store
        TestRF = dict(
            lat=origin.lat,
            lon=origin.lon,
            depth=2. * km,
            width=2. * km,
            length=5. * km,
            rake=90., dip=45., strike=45.,
            slip=1.)

        source = gf.RectangularSource(**TestRF)

        # 40x40 observation grid, +-20 km around the origin
        neast = 40
        nnorth = 40
        N, E = num.meshgrid(num.linspace(-20. * km, 20. * km, nnorth),
                            num.linspace(-20. * km, 20. * km, neast))

        starget = gf.StaticTarget(
            lats=num.array([origin.lat] * N.size),
            lons=num.array([origin.lon] * N.size),
            north_shifts=N.flatten(),
            east_shifts=E.flatten(),
            interpolation='nearest_neighbor')

        engine = gf.LocalEngine(store_dirs=[store_dir])
        t0 = time()
        r = engine.process(source, starget)
        t1 = time()
        logger.info('pyrocko stacking time %f' % (t1 - t0))
        un_fomosto = r.static_results()[0].result['displacement.n']
        ue_fomosto = r.static_results()[0].result['displacement.e']
        ud_fomosto = r.static_results()[0].result['displacement.d']

        # test against direct pscmp output
        lats, lons = ortd.ne_to_latlon(
            origin.lat, origin.lon, N.flatten(), E.flatten())
        pscmp_sources = [psgrn_pscmp.PsCmpRectangularSource(**TestRF)]

        cc = c.pscmp_config
        cc.observation = psgrn_pscmp.PsCmpScatter(lats=lats, lons=lons)
        cc.rectangular_source_patches = pscmp_sources

        ccf = psgrn_pscmp.PsCmpConfigFull(**cc.items())
        ccf.psgrn_outdir = os.path.join(store_dir, c.gf_outdir) + '/'

        t2 = time()
        runner = psgrn_pscmp.PsCmpRunner(keep_tmp=False)
        runner.run(ccf)
        ps2du = runner.get_results(component='displ')[0]
        t3 = time()
        logger.info('pscmp stacking time %f' % (t3 - t2))

        un_pscmp = ps2du[:, 0]
        ue_pscmp = ps2du[:, 1]
        ud_pscmp = ps2du[:, 2]

        # both paths must agree to 2 mm in each component
        num.testing.assert_allclose(un_fomosto, un_pscmp, atol=0.002)
        num.testing.assert_allclose(ue_fomosto, ue_pscmp, atol=0.002)
        num.testing.assert_allclose(ud_fomosto, ud_pscmp, atol=0.002)
    def test_point_in_polygon(self):
        """Exercise spherical polygon machinery on a random triangle.

        Builds a random triangle on the sphere, densifies each edge to
        ``nip`` intermediate points via latlon_to_ne/ne_to_latlon, checks
        the polygon stays within 1.0 (presumably meters in 3-D cartesian
        space — confirm units of distances3d) of its centroid, then rotates
        it to (0, 0) and runs spoly_cut on it. With ``plot`` enabled the
        polygon and cut groups are drawn with matplotlib.
        """
        if plot:
            from pyrocko.plot import mpl_graph_color
            import matplotlib.pyplot as plt
            from matplotlib.patches import Polygon
            axes = plt.gca()

        nip = 100  # number of interpolation points per polygon edge

        for i in range(1):
            np = 3  # triangle corners (NOTE: shadows common numpy alias)
            points = num.zeros((np, 2))
            points[:, 0] = random_lat(size=3)
            points[:, 1] = random_lon(size=3)

            points_ip = num.zeros((nip*points.shape[0], 2))
            # densify each edge: walk from corner ip toward corner ip+1
            # in local north/east coordinates, then map back to lat/lon
            for ip in range(points.shape[0]):
                n, e = orthodrome.latlon_to_ne_numpy(
                    points[ip % np, 0],
                    points[ip % np, 1],
                    points[(ip+1) % np, 0],
                    points[(ip+1) % np, 1])

                ns = num.arange(nip) * n / nip
                es = num.arange(nip) * e / nip

                lats, lons = orthodrome.ne_to_latlon(
                    points[ip % np, 0],
                    points[ip % np, 1],
                    ns, es)

                points_ip[ip*nip:(ip+1)*nip, 0] = lats
                points_ip[ip*nip:(ip+1)*nip, 1] = lons

            if plot:
                color = mpl_graph_color(i)
                # matplotlib wants (x, y) = (lon, lat), hence fliplr
                axes.add_patch(
                    Polygon(
                        num.fliplr(points_ip),
                        facecolor=light(color),
                        edgecolor=color,
                        alpha=0.5))

            points_xyz = orthodrome.latlon_to_xyz(points_ip.T)
            center_xyz = num.mean(points_xyz, axis=0)

            # all densified points must stay close to the centroid
            assert num.all(
                orthodrome.distances3d(
                    points_xyz, center_xyz[num.newaxis, :]) < 1.0)

            lat, lon = orthodrome.xyz_to_latlon(center_xyz)
            rot = orthodrome.rot_to_00(lat, lon)

            points_rot_xyz = num.dot(rot, points_xyz.T).T
            points_rot_pro = orthodrome.stereographic(points_rot_xyz)  # noqa

            poly_xyz = orthodrome.latlon_to_xyz(points_ip)
            poly_rot_xyz = num.dot(rot, poly_xyz.T).T
            groups = orthodrome.spoly_cut([poly_rot_xyz], axis=0)

            # result is discarded; presumably only exercises the call path
            num.zeros(points.shape[0], dtype=num.int)

        if plot:
            for group in groups:
                for poly_rot_group_xyz in group:
                    axes.set_xlim(-180., 180.)
                    axes.set_ylim(-90., 90.)

            plt.show()
class GFPsgrnPscmpTestCase(unittest.TestCase):
    """Python 2 variant of the fomosto-vs-psgrn/pscmp comparison test.

    Note the legacy ``except X, e`` syntax below — this class is Python 2
    only.
    """

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # temporary GF store directories, removed in __del__
        self.tempdirs = []

    def __del__(self):
        # NOTE(review): cleanup via __del__ is fragile (not guaranteed to
        # run at interpreter shutdown); tearDown would be safer — confirm
        import shutil

        for d in self.tempdirs:
            shutil.rmtree(d)

    def test_fomosto_vs_psgrn_pscmp(self):
        """Compare static displacements from a psgrn/pscmp-built GF store
        against direct pscmp output; components must agree to 2 mm."""

        # crust + mantle model; columns: depth vp vs rho qp qs
        mod = cake.LayeredModel.from_scanlines(
            cake.read_nd_model_str('''
 0. 5.8 3.46 2.6 1264. 600.
 20. 5.8 3.46 2.6 1264. 600.
 20. 6.5 3.85 2.9 1283. 600.
 35. 6.5 3.85 2.9 1283. 600.
mantle
 35. 8.04 4.48 3.58 1449. 600.
 77.5 8.045 4.49 3.5 1445. 600.
 77.5 8.045 4.49 3.5 180.6 75.
 120. 8.05 4.5 3.427 180. 75.
 120. 8.05 4.5 3.427 182.6 76.06
 165. 8.175 4.509 3.371 188.7 76.55
 210. 8.301 4.518 3.324 201. 79.4
 210. 8.3 4.52 3.321 336.9 133.3
 410. 9.03 4.871 3.504 376.5 146.1
 410. 9.36 5.08 3.929 414.1 162.7
 660. 10.2 5.611 3.918 428.5 172.9
 660. 10.79 5.965 4.229 1349. 549.6'''.lstrip()))

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)
        store_id = 'psgrn_pscmp_test'

        version = '2008a'

        c = psgrn_pscmp.PsGrnPsCmpConfig()
        c.psgrn_config.sampling_interval = 1.
        c.psgrn_config.version = version
        c.pscmp_config.version = version

        # static store: one sample per day, shallow sources, 40 km range
        config = gf.meta.ConfigTypeA(id=store_id,
                                     ncomponents=10,
                                     sample_rate=1. / (3600. * 24.),
                                     receiver_depth=0. * km,
                                     source_depth_min=0. * km,
                                     source_depth_max=5. * km,
                                     source_depth_delta=0.1 * km,
                                     distance_min=0. * km,
                                     distance_max=40. * km,
                                     distance_delta=0.1 * km,
                                     modelling_code_id='psgrn_pscmp.%s'
                                                       % version,
                                     earthmodel_1d=mod,
                                     tabulated_phases=[])
        config.validate()

        gf.store.Store.create_editables(store_dir, config=config,
                                        extra={'psgrn_pscmp': c})

        store = gf.store.Store(store_dir, 'r')
        store.close()

        # build store
        try:
            psgrn_pscmp.build(store_dir, nworkers=1)
        except psgrn_pscmp.PsCmpError, e:
            if str(e).find('could not start psgrn/pscmp') != -1:
                logger.warn('psgrn/pscmp not installed; '
                            'skipping test_pyrocko_gf_vs_pscmp')
                return
            else:
                raise

        origin = gf.Location(lat=10., lon=-15.)

        # test GF store
        TestRF = dict(lat=origin.lat, lon=origin.lon,
                      depth=2. * km, width=2. * km, length=5. * km,
                      rake=90., dip=45., strike=45.,
                      slip=1.)

        source = gf.RectangularSource(**TestRF)

        # 40x40 observation grid, +-20 km around the origin
        neast = 40
        nnorth = 40

        N, E = num.meshgrid(num.linspace(-20. * km, 20. * km, nnorth),
                            num.linspace(-20. * km, 20. * km, neast))

        starget = gf.StaticTarget(lats=num.array([origin.lat] * N.size),
                                  lons=num.array([origin.lon] * N.size),
                                  north_shifts=N.flatten(),
                                  east_shifts=E.flatten(),
                                  interpolation='nearest_neighbor')

        engine = gf.LocalEngine(store_dirs=[store_dir])
        t0 = time()
        r = engine.process(source, starget)
        t1 = time()
        logger.info('pyrocko stacking time %f' % (t1 - t0))
        un_fomosto = r.static_results()[0].result['displacement.n']
        ue_fomosto = r.static_results()[0].result['displacement.e']
        ud_fomosto = r.static_results()[0].result['displacement.d']

        # test against direct pscmp output
        lats, lons = ortd.ne_to_latlon(origin.lat, origin.lon,
                                       N.flatten(), E.flatten())
        pscmp_sources = [psgrn_pscmp.PsCmpRectangularSource(**TestRF)]

        cc = c.pscmp_config
        cc.observation = psgrn_pscmp.PsCmpScatter(lats=lats, lons=lons)
        cc.rectangular_source_patches = pscmp_sources

        ccf = psgrn_pscmp.PsCmpConfigFull(**cc.items())
        ccf.psgrn_outdir = os.path.join(store_dir, c.gf_outdir) + '/'

        t2 = time()
        runner = psgrn_pscmp.PsCmpRunner(keep_tmp=False)
        runner.run(ccf)
        ps2du = runner.get_results(component='displ')[0]
        t3 = time()
        logger.info('pscmp stacking time %f' % (t3 - t2))

        un_pscmp = ps2du[:, 0]
        ue_pscmp = ps2du[:, 1]
        ud_pscmp = ps2du[:, 2]

        # both paths must agree to 2 mm in each component
        num.testing.assert_allclose(un_fomosto, un_pscmp, atol=0.002)
        num.testing.assert_allclose(ue_fomosto, ue_pscmp, atol=0.002)
        num.testing.assert_allclose(ud_fomosto, ud_pscmp, atol=0.002)
    def draw_plates(self):
        """Draw tectonic plate boundaries, labels and GSRM velocities.

        Samples a regular grid over the map region to decide which
        PeterBird2003 plates are visible, draws plate boundaries with GMT
        (subduction zones get triangle ticks), overlays GSRM1 velocity
        vectors relative to the dominant plate, and labels each plate
        (the fixed reference plate underlined via GMT '@_' markup).
        """
        from pyrocko.dataset import tectonics

        # regular n/e sample grid over the region, aspect-matched
        neast = 20
        nnorth = max(1, int(round(num.round(self._hreg/self._wreg * neast))))
        norths = num.linspace(-self._hreg*0.5, self._hreg*0.5, nnorth)
        easts = num.linspace(-self._wreg*0.5, self._wreg*0.5, neast)
        norths2 = num.repeat(norths, neast)
        easts2 = num.tile(easts, nnorth)
        lats, lons = od.ne_to_latlon(
            self.lat, self.lon, norths2, easts2)

        bird = tectonics.PeterBird2003()
        plates = bird.get_plates()

        color_plates = gmtpy.color('aluminium5')
        color_velocities = gmtpy.color('skyblue1')
        color_velocities_lab = gmtpy.color(darken(gmtpy.color_tup('skyblue1')))

        # find plates that contain at least one grid point
        points = num.vstack((lats, lons)).T
        used = []
        for plate in plates:
            mask = plate.contains_points(points)
            if num.any(mask):
                used.append((plate, mask))

        # only draw plate decorations when more than one plate is visible
        if len(used) > 1:

            # candidate "fixed" plates, scored by visible grid coverage
            candi_fixed = {}

            label_data = []
            for plate, mask in used:

                # order the plate's grid points by distance from their
                # centroid, so labels prefer central positions
                mean_north = num.mean(norths2[mask])
                mean_east = num.mean(easts2[mask])
                iorder = num.argsort(num.sqrt(
                    (norths2[mask] - mean_north)**2 +
                    (easts2[mask] - mean_east)**2))

                lat_candis = lats[mask][iorder]
                lon_candis = lons[mask][iorder]

                candi_fixed[plate.name] = lat_candis.size

                label_data.append((
                    lat_candis, lon_candis, plate, color_plates))

            boundaries = bird.get_boundaries()

            size = 2

            psxy_kwargs = []

            for boundary in boundaries:
                if num.any(points_in_region(boundary.points, self._wesn)):
                    # split into subduction vs. other boundary types
                    for typ, part in boundary.split_types(
                            [['SUB'],
                             ['OSR', 'OTF', 'OCB', 'CTF', 'CCB', 'CRB']]):

                        lats, lons = part.T

                        kwargs = {}
                        if typ[0] == 'SUB':
                            # triangle ticks on the overriding side:
                            # '\\' -> right, '/' -> left
                            if boundary.kind == '\\':
                                kwargs['S'] = 'f%g/%gp+t+r' % (
                                    0.45*size, 3.*size)
                            elif boundary.kind == '/':
                                kwargs['S'] = 'f%g/%gp+t+l' % (
                                    0.45*size, 3.*size)

                            kwargs['G'] = color_plates

                        kwargs['in_columns'] = (lons, lats)
                        # NOTE(review): trailing comma makes this a 1-tuple
                        # ('…p,…',) — looks like a typo; confirm gmtpy
                        # accepts tuple-valued options before changing
                        kwargs['W'] = '%gp,%s' % (size, color_plates),

                        psxy_kwargs.append(kwargs)

                        # boost fixed-plate score of the overriding plate
                        if boundary.kind == '\\':
                            if boundary.name2 in candi_fixed:
                                candi_fixed[boundary.name2] += neast*nnorth

                        elif boundary.kind == '/':
                            if boundary.name1 in candi_fixed:
                                candi_fixed[boundary.name1] += neast*nnorth

            # plate names sorted by descending score; None as sentinel
            candi_fixed = [name for name in sorted(
                list(candi_fixed.keys()),
                key=lambda name: -candi_fixed[name])]

            candi_fixed.append(None)

            gsrm = tectonics.GSRM1()

            for name in candi_fixed:
                if name not in gsrm.plate_names() \
                        and name not in gsrm.plate_alt_names():

                    continue

                lats, lons, vnorth, veast, vnorth_err, veast_err, corr = \
                    gsrm.get_velocities(name, region=self._wesn)

                # NOTE(review): if no candidate matches a GSRM plate,
                # fixed_plate_name is never assigned and its use in the
                # label loop below raises NameError — confirm whether a
                # match is guaranteed
                fixed_plate_name = name

                self.gmt.psvelo(
                    in_columns=(
                        lons, lats, veast, vnorth, veast_err, vnorth_err,
                        corr),
                    W='0.25p,%s' % color_velocities,
                    A='9p+e+g%s' % color_velocities,
                    S='e0.2p/0.95/10',
                    *self.jxyr)

                # annotate roughly one in fifty vectors with its speed
                for _ in range(len(lons) // 50 + 1):
                    ii = random.randint(0, len(lons)-1)
                    v = math.sqrt(vnorth[ii]**2 + veast[ii]**2)
                    self.add_label(
                        lats[ii], lons[ii], '%.0f' % v,
                        font_size=0.7*self.gmt.label_font_size(),
                        style=dict(
                            G=color_velocities_lab))

                # only the best-scoring available plate is used
                break

            for (lat_candis, lon_candis, plate, color) in label_data:
                full_name = bird.full_name(plate.name)
                # underline the fixed reference plate (GMT '@_' markup)
                if plate.name == fixed_plate_name:
                    full_name = '@_' + full_name + '@_'

                self.add_area_label(
                    lat_candis, lon_candis,
                    full_name,
                    color=color,
                    font='3')

            # draw boundaries last so they sit on top
            for kwargs in psxy_kwargs:
                self.gmt.psxy(*self.jxyr, **kwargs)
def ellipse_lat_lon(major, minor, azimuth, lat, lon):
    """Map an ellipse's north/east outline points to latitudes/longitudes.

    The outline is produced by :func:`ellipse` in local cartesian
    coordinates around (lat, lon) and converted with
    ``orthodrome.ne_to_latlon``.
    """
    ne = ellipse(major, minor, azimuth)
    norths = ne[:, 0]
    easts = ne[:, 1]
    return orthodrome.ne_to_latlon(lat, lon, norths, easts)
z = db.firstz + iz*db.dz p.stdin.write("%g %g T T\n" % (x,z)) p.stdin.flush() p.stdin.close() p.wait() elif command == 'syntheseis': olat, olon = 30., 70. receivers = [] distances = num.linspace(3000, 4000, 10) for dist in distances: lat, lon = orthodrome.ne_to_latlon(olat, olon, dist, 0.) r = receiver.Receiver(lat,lon, components='ned') receivers.append(r) db = gfdb.Gfdb('benchdb') seis = seismosizer.Seismosizer(hosts=['localhost']*4) seis.set_database(db) seis.set_effective_dt(0.1) seis.set_local_interpolation('bilinear') seis.set_receivers(receivers) seis.set_source_location( olat, olon, 0.0) s = source.Source('bilateral', sourceparams_str='0 0 0 5000 1e12 91 87 164 0 0 0 0 2500 0.2')
def corners(lon, lat, w, h):
    """Return (lon_min, lat_min, lon_max, lat_max) of a w x h box.

    The box is centered on (lat, lon); w and h are the full east-west and
    north-south extents in the units expected by ``od.ne_to_latlon``.
    """
    half_h = 0.5*h
    half_w = 0.5*w
    lat_min, lon_min = od.ne_to_latlon(lat, lon, -half_h, -half_w)
    lat_max, lon_max = od.ne_to_latlon(lat, lon, half_h, half_w)
    return lon_min, lat_min, lon_max, lat_max
from pyrocko import orthodrome # arguments: origin lat, origin lon, north [m], east [m] lat, lon = orthodrome.ne_to_latlon(37.58, 57.11, 3000.0, 10000.0) print(lat, lon)
x = db.firstx + ix * db.dx z = db.firstz + iz * db.dz p.stdin.write("%g %g T T\n" % (x, z)) p.stdin.flush() p.stdin.close() p.wait() elif command == 'syntheseis': olat, olon = 30., 70. receivers = [] distances = num.linspace(3000, 4000, 10) for dist in distances: lat, lon = orthodrome.ne_to_latlon(olat, olon, dist, 0.) r = receiver.Receiver(lat, lon, components='ned') receivers.append(r) db = gfdb.Gfdb('benchdb') seis = seismosizer.Seismosizer(hosts=['localhost'] * 4) seis.set_database(db) seis.set_effective_dt(0.1) seis.set_local_interpolation('bilinear') seis.set_receivers(receivers) seis.set_source_location(olat, olon, 0.0) s = source.Source( 'bilateral', sourceparams_str='0 0 0 5000 1e12 91 87 164 0 0 0 0 2500 0.2')
from pyrocko import orthodrome # arguments: origin lat, origin lon, north [m], east [m] lat, lon = orthodrome.ne_to_latlon(10.3, 12.4, 22200., 21821.) print("latitude: %s, longitude: %s " % (lat, lon)) # >>> latitude: 10.4995878932, longitude: 12.5995823469