Example #1
Longitudes and latitudes are geodetic (not geocentric). For projections the
WGS84 geographic coordinate system is used. The center of the reference
ellipsoid coincides with the center of mass of the Earth. Z points to the north
pole.
"""

import numpy as np
import pyproj

from ..apy import (PRECISION, Object, is_number, is_non_neg_number, is_integer,
                   is_1d_numeric_array, is_in_range)
from .. import space3
from ..units import MOTION as UNITS
from . import flat

_G = pyproj.Geod(ellps='WGS84')  ## The WGS84 reference ellipsoid


class Point(Object):
    """
    """
    def __init__(self, lon, lat, alt=0., validate=True):
        """
        lon: number, lon of point (in degrees)
        lat: number, lat of point (in degrees)
        alt: number, alt of point (in m) (default: 0.)
        """
        if validate is True:
            assert (is_number(lon) and is_lon(lon))
            assert (is_number(lat) and is_lat(lat))
            assert (is_number(alt))

        if abs(lon) < 10**-PRECISION:
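
The module docstring above distinguishes geodetic from geocentric latitude. As a minimal standalone sketch (not part of the module above; the constants and helper are illustrative), the two are related on the WGS84 ellipsoid by tan(phi_geocentric) = (b/a)**2 * tan(phi_geodetic):

import numpy as np

WGS84_A = 6378137.0          # WGS84 semi-major axis (m)
WGS84_B = 6356752.314245     # WGS84 semi-minor axis (m)

def geocentric_latitude(geodetic_lat_deg):
    """Convert geodetic latitude (deg) to geocentric latitude (deg)."""
    phi = np.deg2rad(geodetic_lat_deg)
    return np.rad2deg(np.arctan((WGS84_B / WGS84_A) ** 2 * np.tan(phi)))

print(geocentric_latitude(45.0))  # ~44.81; the difference peaks near 45 degrees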
Example #2
File: ship_utils.py Project: luckylixu/ACT
def calc_cog_sog(obj):
    """
    This function calculates the course and speed over ground of a moving
    platform using the lat/lon.  Note, data are resampled to 1 minute in
    order to provide a better estimate of speed/course compared with 1 second.

    Function is set up to use dask for the calculations in order to improve
    efficiency. Data are then resampled to 1 second to match native format.
    This assumes that the input data are 1 second.  See this `example
    <https://ARM-DOE.github.io/ACT/source/auto_examples/correct_ship_wind_data.html
    #sphx-glr-source-auto-examples-correct-ship-wind-data-py>`_.

    Parameters
    ----------
    obj: ACT Dataset
        ACT Dataset to calculate COG/SOG from.  Assumes lat/lon are variables and
        that it's 1-second data

    Returns
    -------
    obj: ACT Dataset
        Returns object with course_over_ground and speed_over_ground variables

    """

    # Convert data to 1 minute in order to get proper values
    new_obj = obj.resample(time='1min').nearest()

    # Get lat and lon data
    if 'lat' in new_obj:
        lat = new_obj['lat']
    elif 'latitude' in new_obj:
        lat = new_obj['latitude']
    else:
        return new_obj

    if 'lon' in new_obj:
        lon = new_obj['lon']
    elif 'longitude' in new_obj:
        lon = new_obj['longitude']
    else:
        return new_obj

    # Set pyproj Geod
    _GEOD = pyproj.Geod(ellps='WGS84')

    # Set up delayed tasks for dask
    task = []
    time = new_obj['time'].values
    for i in range(len(lat) - 1):
        task.append(
            dask.delayed(proc_scog)(_GEOD, lon[i + 1], lat[i + 1], lon[i],
                                    lat[i], time[i], time[i + 1]))

    # Compute and process results, adding 2 values
    # to the end to make up for the missing times
    results = dask.compute(*task)
    sog = [r[0] for r in results]
    sog.append(sog[-1])
    sog.append(sog[-1])
    cog = [r[1] for r in results]
    cog.append(cog[-1])
    cog.append(cog[-1])
    time = np.append(time, time[-1] + np.timedelta64(1, 'm'))

    atts = {'long_name': 'Speed over ground', 'units': 'm/s'}
    sog_da = xr.DataArray(sog,
                          coords={'time': time},
                          dims=['time'],
                          attrs=atts)
    sog_da = sog_da.resample(time='1s').nearest()

    atts = {'long_name': 'Course over ground', 'units': 'deg'}
    cog_da = xr.DataArray(cog,
                          coords={'time': time},
                          dims=['time'],
                          attrs=atts)
    cog_da = cog_da.resample(time='1s').nearest()

    obj['course_over_ground'] = cog_da
    obj['speed_over_ground'] = sog_da

    return obj
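
The helper proc_scog used above is not shown in this example. A minimal sketch of what it plausibly computes, assuming (as the result unpacking above implies) it returns a (speed, course) pair for one pair of fixes:

import numpy as np

def proc_scog(geod, lon2, lat2, lon1, lat1, time1, time2):
    # Forward azimuth (deg) and geodesic distance (m) between the two fixes
    az, _, dist = geod.inv(lon1, lat1, lon2, lat2)
    # Elapsed time in seconds between the two numpy datetime64 stamps
    dt = (time2 - time1) / np.timedelta64(1, 's')
    sog = dist / dt   # speed over ground (m/s)
    cog = az % 360.   # course over ground (deg clockwise from north)
    return sog, cog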
Example #3
# Plot the following two stations as red triangles on a map and calculate
# their distance, azimuth and back-azimuth for a spherical earth.
# SULZ: lat=47.52748, lon=8.11153
# SALO: lat=45.6183, lon=10.5243
import pyproj
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap

lats = [47.52748, 45.6183]
lons = [8.11153, 10.5243]
names = ['SULZ', 'SALO']
m = Basemap(projection='merc', llcrnrlat=43, urcrnrlat=48, \
            llcrnrlon=4, urcrnrlon=12, lat_ts=45, resolution='i')
x, y = m(lons, lats)
m.drawcountries()
m.scatter(x, y, 100, color="r", marker="^")
for i, name in enumerate(names):
    plt.text(x[i], y[i], name, va='top')
gc = pyproj.Geod(ellps='sphere')
az, baz, dist = gc.inv(lons[0], lats[0], lons[1], lats[1])
print "azimuth=%.2f, back-azimuth=%.2f, distance=%.2f km" % (az, baz,
                                                             dist / 1000.)
plt.show()

# Plot the real component of the spherical harmonics of order
# 2 and degree 5 on a sphere.
# The modules that you will need are:
# scipy.special (the function sph_harm)
# numpy
# basemap
# matplotlib
#
# Note: Spherical harmonics are computed in colatitude and basemap
# uses latitude.
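
A minimal sketch of the colatitude conversion flagged in the note above; scipy.special.sph_harm takes the azimuthal angle first and the polar angle (colatitude) second, so the latitudes Basemap works with must be converted (the grid spacing here is arbitrary):

import numpy as np
from scipy.special import sph_harm

lons = np.linspace(0, 360, 181)
lats = np.linspace(-90, 90, 91)
glon, glat = np.meshgrid(lons, lats)
theta = np.deg2rad(glon)                  # azimuthal angle
colat = np.deg2rad(90. - glat)            # latitude -> colatitude
harm = sph_harm(2, 5, theta, colat).real  # real part, order m=2, degree n=5

harm can then be drawn on the sphere with Basemap's pcolormesh, as in the snippet above.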
Example #4
 def test_nearest_sw_corner(self):
     geod = pyproj.Geod(ellps='sphere')
     mg = sg.gridio.read_mitgridfile('./data/tile005.mitgrid', 270, 90)
     i, j, dist = sg.util.nearest(-128., 67.5, mg['XG'], mg['YG'], geod)
     self.assertEqual((i, j), (0, 0))
     self.assertAlmostEqual(dist, 1.20941759e-09)
Example #5
def calc_cog_sog(obj):
    """
    This function calculates the course and speed over ground of a moving
    platform using the lat/lon. Data are resampled to 1 minute in order to
    provide a better estimate of speed/course compared with 1 second.

    Parameters
    ----------
    obj: ACT Dataset
        ACT Dataset to calculate COG/SOG from.

    Returns
    -------
    obj: ACT Dataset
        Returns object with course_over_ground and speed_over_ground variables

    """

    # Convert data to 1 minute in order to get proper values
    new_obj = obj.resample(time='1min').nearest()

    # Get lat and lon data
    if 'lat' in new_obj:
        lat = new_obj['lat']
    elif 'latitude' in new_obj:
        lat = new_obj['latitude']
    else:
        return new_obj

    if 'lon' in new_obj:
        lon = new_obj['lon']
    elif 'longitude' in new_obj:
        lon = new_obj['longitude']
    else:
        return new_obj

    # Set pyproj Geod
    _GEOD = pyproj.Geod(ellps='WGS84')

    # Set up delayed tasks for dask
    task = []
    time = new_obj['time'].values
    for i in range(len(lat) - 1):
        task.append(
            dask.delayed(proc_scog)(_GEOD, lon[i + 1], lat[i + 1], lon[i],
                                    lat[i], time[i], time[i + 1]))

    # Compute and process results, adding 2 values
    # to the end to make up for the missing times
    results = dask.compute(*task)
    sog = [r[0] for r in results]
    sog.append(sog[-1])
    sog.append(sog[-1])
    cog = [r[1] for r in results]
    cog.append(cog[-1])
    cog.append(cog[-1])
    time = np.append(time, time[-1] + np.timedelta64(1, 'm'))

    atts = {'long_name': 'Speed over ground', 'units': 'm/s'}
    sog_da = xr.DataArray(sog,
                          coords={'time': time},
                          dims=['time'],
                          attrs=atts)
    sog_da = sog_da.resample(time='1s').nearest()

    atts = {'long_name': 'Course over ground', 'units': 'deg'}
    cog_da = xr.DataArray(cog,
                          coords={'time': time},
                          dims=['time'],
                          attrs=atts)
    cog_da = cog_da.resample(time='1s').nearest()

    obj['course_over_ground'] = cog_da
    obj['speed_over_ground'] = sog_da

    return obj
Example #6
def minute_track_from_30sec_data_leg0():
    """
        Function to read the 1/30-second meteorological data and derive the ship's velocity from the low-resolution ship track. Some basic filtering is applied to remove faulty data.

        :returns: data frame containing the ship's track and velocity at 1 minute resolution for leg 0
    """
    # before '2016-11-27 10:01:30' we have no 1/sec latitude/longitude record
    # here we load the 1/30sec metdata file and use these latitude/longitude to estimate ship velocity
    # some despiking is needed here!
    csv_file_folder = './data/summary_raw_meteorologic_10/'

    met_all_leg0 = pd.read_csv(
        csv_file_folder + 'metdata_all_20161119_20161216.csv',
        usecols=['date_time', 'latitude', 'longitude', 'TIMEDIFF'])

    met_all_leg0_systime = met_all_leg0.copy()
    met_all_leg0_systime = met_all_leg0.set_index(
        pd.to_datetime(met_all_leg0.date_time, format="%Y-%m-%d %H:%M:%S"))
    met_all_leg0 = met_all_leg0.set_index(
        (pd.to_datetime(met_all_leg0.date_time, format="%Y-%m-%d %H:%M:%S") +
         pd.to_timedelta(met_all_leg0.TIMEDIFF,
                         unit='s')))  # assign the time stamp

    # filtering some unrealistic coordinates
    bad_lad_long = ((met_all_leg0.latitude > 22) & (met_all_leg0.latitude < 26)
                    & (met_all_leg0.longitude > -14) &
                    (met_all_leg0.longitude < 2))
    met_all_leg0.at[bad_lad_long, 'latitude'] = np.nan
    met_all_leg0.at[bad_lad_long, 'longitude'] = np.nan

    # these bounds work for leg0
    met_all_leg0.at[(met_all_leg0.latitude < -36), 'latitude'] = np.nan
    met_all_leg0.at[(met_all_leg0.latitude > 57), 'latitude'] = np.nan

    SOG = np.ones_like(met_all_leg0.latitude) * np.nan
    COG = np.ones_like(met_all_leg0.latitude) * np.nan
    velEast = np.ones_like(met_all_leg0.latitude) * np.nan
    velNorth = np.ones_like(met_all_leg0.latitude) * np.nan

    lon1 = np.array(met_all_leg0.longitude[0:-1])
    lat1 = np.array(met_all_leg0.latitude[0:-1])
    lon2 = np.array(met_all_leg0.longitude[1:])
    lat2 = np.array(met_all_leg0.latitude[1:])

    # calculate rolling mean of latitudes to filter outliers later
    met_all_leg0_r = met_all_leg0.rolling(window=5, center=True).mean()
    met_all_leg0_r.at[(met_all_leg0_r.latitude < -36), 'latitude'] = np.nan
    met_all_leg0_r.at[(met_all_leg0_r.latitude > 57), 'latitude'] = np.nan

    lon1_r = np.array(met_all_leg0_r['longitude'][0:-1])
    lat1_r = np.array(met_all_leg0_r['latitude'][0:-1])
    lon2_r = np.array(met_all_leg0_r['longitude'][1:])
    lat2_r = np.array(met_all_leg0_r['latitude'][1:])

    dt = (met_all_leg0.index[1:] - met_all_leg0.index[0:-1])
    #dt = (met_all_leg0_systime.index[1:]-met_all_leg0_systime.index[0:-1]);
    dt = np.array(dt.total_seconds())

    MissingValues = np.where(np.isnan(lon1 * lon2 * lat1 * lat2))
    MissingValues_r = np.where(
        np.isnan(lon1 * lon2 * lat1 * lat2 * lon1_r * lon2_r * lat1_r *
                 lat2_r))

    lon1[MissingValues] = 0
    lon2[MissingValues] = 0
    lat1[MissingValues] = 0
    lat2[MissingValues] = 0

    (az12, az21, dist) = pyproj.Geod(ellps=sphere_model_for_pyproj).inv(
        lon1, lat1, lon2, lat2)

    lon1[MissingValues_r] = 0
    lon2[MissingValues_r] = 0
    lat1[MissingValues_r] = 0
    lat2[MissingValues_r] = 0

    # calculate how far a point deviates from the rolling mean coordinates and use this to flag outliers
    lon1_r[MissingValues_r] = 0
    lon2_r[MissingValues_r] = 0
    lat1_r[MissingValues_r] = 0
    lat2_r[MissingValues_r] = 0
    (_, _, dist_r1) = pyproj.Geod(ellps=sphere_model_for_pyproj).inv(
        lon1, lat1, lon1_r, lat1_r)
    (_, _, dist_r2) = pyproj.Geod(ellps=sphere_model_for_pyproj).inv(
        lon2, lat2, lon2_r, lat2_r)

    DIST_R1 = np.ones_like(met_all_leg0.latitude) * np.nan

    DIST_R1[1:-1] = np.nanmax([dist_r1[0:-1], dist_r1[1:]],
                              axis=0).transpose()
    # keep the larger of the two segment deviations adjacent to each sample
    DIST_R1[0] = dist_r1[0]
    DIST_R1[-1] = dist_r1[-1]
    # fill first and last reading

    DIST_R2 = np.ones_like(met_all_leg0.latitude) * np.nan

    DIST_R2[1:-1] = np.nanmax([dist_r2[0:-1], dist_r2[1:]],
                              axis=0).transpose()
    # keep the larger of the two segment deviations adjacent to each sample
    DIST_R2[0] = dist_r2[0]
    DIST_R2[-1] = dist_r2[-1]
    # fill first and last reading

    vel = np.true_divide(dist, dt)
    vel[MissingValues] = np.nan
    print(
        str(np.sum(vel > (12))) +
        ' samples with velocity larger than 12 m/s')  # give this more tolerance
    vel[(vel > (12))] = np.nan  # cut unrealistic velocities (vel > 12 m/s)
    vel[(
        dt > (1 * 50)
    )] = np.nan  # set velocities with dt>50sec to NaN (don't trust velocities at the edge of data gaps)
    vel[(dist > 300)] = np.nan  # was 200
    vel[(dist_r1 > 600)] = np.nan
    vel[(dist_r2 > 600)] = np.nan

    vel[np.abs(dt - np.round(dt)) >
        0.015] = np.nan  # these seem to be very noisy in position

    print(np.nanmean(vel))

    evel = np.cos((-az12 + 90) * np.pi / 180) * vel
    nvel = np.sin((-az12 + 90) * np.pi / 180) * vel

    timest_vel = met_all_leg0.index[1:] + pd.to_timedelta(
        dt * 0.5, unit='s')  # just to have a time stamp to plot vel against

    # note SOG, COG are recomputed from velEast velNorth below!
    SOG[1:-1] = np.nanmean([vel[0:-1], vel[1:]], axis=0)
    # calculate sog as avg of vel(jj-1,jj) and vel(jj,jj+1)
    SOG[0] = vel[0]
    SOG[-1] = vel[-1]
    # fill first and last reading

    COG[1:-1] = np.rad2deg((np.arctan2(
        np.nanmean([
            vel[0:-1] * np.sin(np.deg2rad(az12[0:-1])),
            vel[1:] * np.sin(np.deg2rad(az12[1:]))
        ],
                   axis=0),
        np.nanmean([
            vel[0:-1] * np.cos(np.deg2rad(az12[0:-1])),
            vel[1:] * np.cos(np.deg2rad(az12[1:]))
        ],
                   axis=0)) + 2 * np.pi) % (2 * np.pi))
    COG[0] = az12[0]
    COG[-1] = az12[-1]
    # fill first and last reading

    velEast[1:-1] = np.nanmean([evel[0:-1], evel[1:]], axis=0)
    velEast[0] = evel[0]
    velEast[-1] = evel[-1]
    velNorth[1:-1] = np.nanmean([nvel[0:-1], nvel[1:]], axis=0)
    velNorth[0] = nvel[0]
    velNorth[-1] = nvel[-1]

    #met_all_leg0 = met_all_leg0.assign( DIST_R1 = DIST_R1)
    #met_all_leg0 = met_all_leg0.assign( DIST_R2 = DIST_R2)

    met_all_leg0 = met_all_leg0.assign(SOG=SOG)
    met_all_leg0 = met_all_leg0.assign(COG=COG)
    met_all_leg0 = met_all_leg0.assign(velEast=velEast)
    met_all_leg0 = met_all_leg0.assign(velNorth=velNorth)

    lon_median = met_all_leg0['longitude'].resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(
            merge_at_nseconds *
            .5))).median()  # interval median, used below to fix dateline issues

    SOG = met_all_leg0.SOG.copy()
    SOG_MAX = SOG.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(merge_at_nseconds * .5))).max()
    SOG_MIN = SOG.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(merge_at_nseconds * .5))).min()

    met_all_leg0 = met_all_leg0.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(
            seconds=(merge_at_nseconds *
                     .5))).mean()  # calculate 1min mean to match wind data
    met_all_leg0 = met_all_leg0.assign(SOG_DIFF=(SOG_MAX - SOG_MIN))
    # here we fix dateline issues of averaging longitudes around +/-180 degree
    # this is a bit coarse, but it works.
    lon_flip_tollerance = 0.0005  # a value well above the normal difference of mean and median longitudes
    #met_all_leg0['longitude'][np.abs(met_all_leg0.longitude-lon_median)>lon_flip_tollerance]=lon_median[np.abs(met_all_leg0.longitude-lon_median)>lon_flip_tollerance]

    met_all_leg0 = met_all_leg0.assign(COG=(
        (90 -
         np.rad2deg(np.arctan2(met_all_leg0.velNorth, met_all_leg0.velEast))) %
        360))  # recompute COG from averaged North/East velocities

    met_all_leg0.SOG = np.sqrt(
        np.square(met_all_leg0.velNorth) +
        np.square(met_all_leg0.velEast))  # vector average velocity

    bad_lad_long = ((met_all_leg0.latitude > 22) & (met_all_leg0.latitude < 26)
                    & (met_all_leg0.longitude > -14) &
                    (met_all_leg0.longitude < 2))
    met_all_leg0.at[bad_lad_long, 'latitude'] = np.nan
    met_all_leg0.at[bad_lad_long, 'longitude'] = np.nan

    bad_lad_long = ((met_all_leg0.latitude > 3) & (met_all_leg0.latitude < 5) &
                    (met_all_leg0.longitude > -18) &
                    (met_all_leg0.longitude < -17))
    met_all_leg0.at[bad_lad_long, 'latitude'] = np.nan
    met_all_leg0.at[bad_lad_long, 'longitude'] = np.nan
    bad_lad_long = ((met_all_leg0.latitude > -8) & (met_all_leg0.latitude < 5)
                    & (met_all_leg0.longitude > -6.5) &
                    (met_all_leg0.longitude < -4.4))
    met_all_leg0.at[bad_lad_long, 'latitude'] = np.nan
    met_all_leg0.at[bad_lad_long, 'longitude'] = np.nan

    met_all_leg0.at[
        np.isnan(met_all_leg0.SOG),
        'latitude'] = np.nan  # latitude/longitude are also scattering for SOG outliers
    met_all_leg0.at[
        np.isnan(met_all_leg0.SOG),
        'longitude'] = np.nan  # latitude/longitude are also scattering for SOG outliers
    met_all_leg0.at[np.isnan(met_all_leg0.SOG), 'COG'] = np.nan
    met_all_leg0.at[np.isnan(met_all_leg0.SOG), 'velEast'] = np.nan
    met_all_leg0.at[np.isnan(met_all_leg0.SOG), 'velNorth'] = np.nan
    met_all_leg0.at[np.isnan(met_all_leg0.SOG), 'SOG_DIFF'] = np.nan

    # additional velocity filtering based on deviation from a rolling window of 10 minutes:
    # here we need to interpolate across NaNs
    SPIKES = np.abs(met_all_leg0.SOG - met_all_leg0.SOG.interpolate().rolling(
        window=10, center=True).mean()) > .5
    for var_str in ['SOG', 'COG', 'velEast', 'velNorth', 'SOG_DIFF']:
        met_all_leg0.at[SPIKES, var_str] = np.nan

    met_all_leg0.drop(columns=['TIMEDIFF'], inplace=True)
    met_all_leg0.index.name = 'date_time'

    return met_all_leg0
Example #7
    def plot(self,
             variable=None,
             vmin=None,
             vmax=None,
             filename=None,
             title=None):
        """Plot geographical coverage of reader."""

        try:
            from mpl_toolkits.basemap import Basemap
            import matplotlib.pyplot as plt
            from matplotlib.patches import Polygon
        except ImportError:
            sys.exit('Basemap is needed to plot coverage map.')

        corners = self.xy2lonlat([self.xmin, self.xmin, self.xmax, self.xmax],
                                 [self.ymax, self.ymin, self.ymax, self.ymin])
        latspan = np.max(corners[1]) - np.min(corners[1])

        # Initialise map
        if latspan < 90:
            # Stereographic projection centred on domain, if small domain
            x0 = (self.xmin + self.xmax) / 2
            y0 = (self.ymin + self.ymax) / 2
            lon0, lat0 = self.xy2lonlat(x0, y0)
            width = np.max([self.xmax - self.xmin, self.ymax - self.ymin
                            ]) * 1.5
            geod = pyproj.Geod(ellps='WGS84')
            # Calculate length of diagonals to determine map size
            d1 = geod.inv(corners[0][0],
                          corners[1][0],
                          corners[0][1],
                          corners[1][1],
                          radians=False)[2]
            d2 = geod.inv(corners[0][2],
                          corners[1][2],
                          corners[0][3],
                          corners[1][3],
                          radians=False)[2]
            width = np.max((d1, d2)) * 3
            map = Basemap(projection='stere',
                          resolution='i',
                          lat_ts=lat0,
                          lat_0=lat0,
                          lon_0=lon0,
                          width=width,
                          height=width)
        else:
            # Global map if reader domain is large
            map = Basemap(np.array(corners[0]).min(),
                          -89,
                          np.array(corners[0]).max(),
                          89,
                          resolution='c',
                          projection='cyl')

        map.drawcoastlines()
        if variable is None:
            map.fillcontinents(color='coral')
        map.drawparallels(np.arange(-90., 90., 5.))
        map.drawmeridians(np.arange(-180., 181., 5.))
        # Get boundary
        npoints = 10  # points per side
        x = np.array([])
        y = np.array([])
        x = np.concatenate((x, np.linspace(self.xmin, self.xmax, npoints)))
        y = np.concatenate((y, [self.ymin] * npoints))
        x = np.concatenate((x, [self.xmax] * npoints))
        y = np.concatenate((y, np.linspace(self.ymin, self.ymax, npoints)))
        x = np.concatenate((x, np.linspace(self.xmax, self.xmin, npoints)))
        y = np.concatenate((y, [self.ymax] * npoints))
        x = np.concatenate((x, [self.xmin] * npoints))
        y = np.concatenate((y, np.linspace(self.ymax, self.ymin, npoints)))
        # from x/y vectors create a Patch to be added to map
        lon, lat = self.xy2lonlat(x, y)
        mapproj = pyproj.Proj(map.proj4string)
        # Cut at 89 deg N/S to avoid singularity at poles
        lat[lat > 89] = 89
        lat[lat < -89] = -89
        xm, ym = mapproj(lon, lat)
        xm, ym = map(lon, lat)
        #map.plot(xm, ym, color='gray')
        if variable is None:
            boundary = Polygon(list(zip(xm, ym)), alpha=0.5, ec='k', fc='b')
            plt.gca().add_patch(boundary)  # add patch to the map
        if title is None:
            plt.title(self.name)
        else:
            plt.title(title)
        plt.xlabel('Time coverage: %s to %s' %
                   (self.start_time, self.end_time))

        if variable is not None:
            rx = np.array([self.xmin, self.xmax])
            ry = np.array([self.ymin, self.ymax])
            data = self.get_variables(variable,
                                      self.start_time,
                                      rx,
                                      ry,
                                      block=True)
            rx, ry = np.meshgrid(data['x'], data['y'])
            rlon, rlat = self.xy2lonlat(rx, ry)
            map_x, map_y = map(rlon, rlat, inverse=False)
            data[variable] = np.ma.masked_invalid(data[variable])
            if hasattr(self, 'convolve'):
                from scipy import ndimage
                N = self.convolve
                if isinstance(N, (int, np.integer)):
                    kernel = np.ones((N, N))
                    kernel = kernel / kernel.sum()
                else:
                    kernel = N
                logging.debug('Convolving variables with kernel: %s' % kernel)
                data[variable] = ndimage.convolve(data[variable],
                                                  kernel,
                                                  mode='nearest')
            map.pcolormesh(map_x, map_y, data[variable], vmin=vmin, vmax=vmax)
            cbar = map.colorbar()
            cbar.set_label(variable)

        try:  # Activate figure zooming
            mng = plt.get_current_fig_manager()
            mng.toolbar.zoom()
        except:
            pass

        if filename is not None:
            plt.savefig(filename)
            plt.close()
        else:
            plt.show()
Example #8
File: crs.py Project: jmjak86/karta
 def __init__(self, spheroid, name):
     self._geod = pyproj.Geod(spheroid)
     self.name = name
     return
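
For illustration, the spheroid argument is handed straight to pyproj.Geod, which accepts a proj-style initialization string, so a hypothetical caller of this constructor might pass:

geod = pyproj.Geod('+ellps=GRS80')  # same call the constructor above makes with spheroid='+ellps=GRS80'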
Example #9
    def advect_ocean_current(self):
        # Runge-Kutta scheme
        if self.get_config('drift:scheme')[0:11] == 'runge-kutta':
            x_vel = self.environment.x_sea_water_velocity
            y_vel = self.environment.y_sea_water_velocity

            # Find midpoint
            az = np.degrees(np.arctan2(x_vel, y_vel))
            speed = np.sqrt(x_vel * x_vel + y_vel * y_vel)
            dist = speed * self.time_step.total_seconds() * .5
            geod = pyproj.Geod(ellps='WGS84')
            mid_lon, mid_lat, dummy = geod.fwd(self.elements.lon,
                                               self.elements.lat,
                                               az,
                                               dist,
                                               radians=False)
            # Find current at midpoint, a half timestep later
            logging.debug('Runge-kutta, fetching half time-step later...')
            mid_env, profiles, missing = self.get_environment(
                ['x_sea_water_velocity', 'y_sea_water_velocity'],
                self.time + self.time_step / 2,
                mid_lon,
                mid_lat,
                self.elements.z,
                profiles=None)
            if self.get_config('drift:scheme') == 'runge-kutta4':
                logging.debug('Runge-kutta 4th order...')
                x_vel2 = mid_env['x_sea_water_velocity']
                y_vel2 = mid_env['y_sea_water_velocity']
                az2 = np.degrees(np.arctan2(x_vel2, y_vel2))
                speed2 = np.sqrt(x_vel2 * x_vel2 + y_vel2 * y_vel2)
                dist2 = speed2 * self.time_step.total_seconds() * .5
                lon2, lat2, dummy = \
                    geod.fwd(self.elements.lon,
                             self.elements.lat,
                             az2, dist2, radians=False)
                env2, profiles, missing = self.get_environment(
                    ['x_sea_water_velocity', 'y_sea_water_velocity'],
                    self.time + self.time_step / 2,
                    lon2,
                    lat2,
                    self.elements.z,
                    profiles=None)
                # Third step
                x_vel3 = env2['x_sea_water_velocity']
                y_vel3 = env2['y_sea_water_velocity']
                az3 = np.degrees(np.arctan2(x_vel3, y_vel3))
                speed3 = np.sqrt(x_vel3 * x_vel3 + y_vel3 * y_vel3)
                dist3 = speed3 * self.time_step.total_seconds() * .5
                lon3, lat3, dummy = \
                    geod.fwd(self.elements.lon,
                             self.elements.lat,
                             az3, dist3, radians=False)
                env3, profiles, missing = self.get_environment(
                    ['x_sea_water_velocity', 'y_sea_water_velocity'],
                    self.time + self.time_step,
                    lon3,
                    lat3,
                    self.elements.z,
                    profiles=None)
                # Fourth step
                x_vel4 = env3['x_sea_water_velocity']
                y_vel4 = env3['y_sea_water_velocity']
                u4 = (x_vel + 2 * x_vel2 + 2 * x_vel3 + x_vel4) / 6.0
                v4 = (y_vel + 2 * y_vel2 + 2 * y_vel3 + y_vel4) / 6.0
                # Move particles using runge-kutta4 velocity
                self.update_positions(u4, v4)

            else:
                # Move particles using runge-kutta velocity
                self.update_positions(mid_env['x_sea_water_velocity'],
                                      mid_env['y_sea_water_velocity'])
        elif self.get_config('drift:scheme') == 'euler':
            # Euler scheme
            self.update_positions(self.environment.x_sea_water_velocity,
                                  self.environment.y_sea_water_velocity)
        else:
            raise ValueError('Drift scheme not recognised: ' +
                             self.get_config('drift:scheme'))
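
A minimal standalone sketch of the midpoint step in the Runge-Kutta branch above; the position, velocity, and time step are arbitrary illustration values:

import numpy as np
import pyproj

geod = pyproj.Geod(ellps='WGS84')
lon, lat = 4.0, 60.0                   # element position (deg)
u, v = 0.5, 0.2                        # x/y sea water velocity (m/s)
dt = 3600.0                            # time step (s)
az = np.degrees(np.arctan2(u, v))      # course (deg clockwise from north)
dist = np.hypot(u, v) * dt * 0.5       # half-step distance (m)
mid_lon, mid_lat, _ = geod.fwd(lon, lat, az, dist)
print(mid_lon, mid_lat)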
Example #10
def test_basic_simulation():
    ## CREATION OF GRAPH
    Node = type(
        'Site',
        (core.Identifiable, core.Log, core.Locatable, core.HasResource), {})
    nodes = []
    path = []

    distances = [550, 500, 300, 500, 150, 150, 500, 300, 500, 550]
    coords = []
    coords.append([0, 0])

    for d in range(len(distances)):
        coords.append([
            pyproj.Geod(ellps="WGS84").fwd(coords[d][0], coords[d][1], 90,
                                           distances[d])[0], 0
        ])

    for d in range(len(coords)):
        data_node = {
            "env": [],
            "name": "Node " + str(d + 1),
            "geometry": shapely.geometry.Point(coords[d][0], coords[d][1])
        }
        node = Node(**data_node)
        nodes.append(node)

    for i in range(2):
        if i == 0:
            for j in range(len(nodes) - 1):
                path.append([nodes[j], nodes[j + 1]])
        if i == 1:
            for j in range(len(nodes) - 1):
                path.append([nodes[j + 1], nodes[j]])

    FG = nx.DiGraph()

    positions = {}
    for node in nodes:
        positions[node.name] = (node.geometry.x, node.geometry.y)
        FG.add_node(node.name, geometry=node.geometry)

    for edge in path:
        FG.add_edge(edge[0].name, edge[1].name, weight=1)

    ## SIMULATION SET-UP
    simulation_start = datetime.datetime.now()
    env = simpy.Environment(
        initial_time=time.mktime(simulation_start.timetuple()))

    ## CREATION OF VESSELS
    Vessel = type('Vessel',
                  (core.Identifiable, core.Movable, core.HasContainer,
                   core.HasResource, core.Routeable, core.VesselProperties),
                  {})
    start_point = 'Node 1'
    end_point = 'Node 11'

    data_vessel_one = {
        "env": env,
        "name": "Vessel",
        "route": nx.dijkstra_path(FG, end_point, start_point, weight='length'),
        "geometry": FG.nodes[end_point]['geometry'],
        "capacity": 1_000,
        "v": 4,
        "type": 'CEMT - Va',
        "B": 10,
        "L": 135.0
    }

    env.FG = FG
    vessels = []

    vessel = Vessel(**data_vessel_one)
    vessels.append(vessel)

    ## SYSTEM PARAMETERS
    # water level difference
    wlev_dif = [np.linspace(0, 45000, 1000), np.zeros(1000)]
    for i in range(len(wlev_dif[0])):
        wlev_dif[1][i] = 2

    # lock area parameters
    waiting_area_1 = core.IsLockWaitingArea(env=env,
                                            nr_resources=1,
                                            priority=True,
                                            name='Volkeraksluizen_1',
                                            node="Node 2")

    lineup_area_1 = core.IsLockLineUpArea(env=env,
                                          nr_resources=1,
                                          priority=True,
                                          name='Volkeraksluizen_1',
                                          node="Node 3",
                                          lineup_length=300)

    lock_1 = core.IsLock(env=env,
                         nr_resources=100,
                         priority=True,
                         name='Volkeraksluizen_1',
                         node_1="Node 5",
                         node_2="Node 6",
                         node_3="Node 7",
                         lock_length=300,
                         lock_width=24,
                         lock_depth=4.5,
                         doors_open=10 * 60,
                         doors_close=10 * 60,
                         wlev_dif=wlev_dif,
                         disch_coeff=0.8,
                         grav_acc=9.81,
                         opening_area=4.0,
                         opening_depth=5.0,
                         simulation_start=simulation_start,
                         operating_time=25 * 60)

    waiting_area_2 = core.IsLockWaitingArea(env=env,
                                            nr_resources=1,
                                            priority=True,
                                            name="Volkeraksluizen_1",
                                            node="Node 10")

    lineup_area_2 = core.IsLockLineUpArea(env=env,
                                          nr_resources=1,
                                          priority=True,
                                          name="Volkeraksluizen_1",
                                          node="Node 9",
                                          lineup_length=300)

    lock_1.water_level = "Node 5"

    # location of lock areas in graph
    FG.nodes["Node 6"]["Lock"] = [lock_1]

    FG.nodes["Node 2"]["Waiting area"] = [waiting_area_1]
    FG.nodes["Node 3"]["Line-up area"] = [lineup_area_1]

    FG.nodes["Node 10"]["Waiting area"] = [waiting_area_2]
    FG.nodes["Node 9"]["Line-up area"] = [lineup_area_2]

    ## INITIATE VESSELS
    for vessel in vessels:
        vessel.env = env
        env.process(vessel.move())

    ## RUN MODEL
    env.FG = FG
    env.run()

    ## OUTPUT ANALYSIS
    def calculate_distance(orig, dest):
        wgs84 = pyproj.Geod(ellps='WGS84')

        distance = wgs84.inv(orig[0], orig[1], dest[0], dest[1])[2]
        return distance

    lock_cycle_start = np.zeros(len(vessels))
    lock_cycle_duration = np.zeros(len(vessels))
    waiting_in_lineup_start = np.zeros(len(vessels))
    waiting_in_lineup_duration = np.zeros(len(vessels))
    waiting_in_waiting_start = np.zeros(len(vessels))
    waiting_in_waiting_duration = np.zeros(len(vessels))
    vessel_speed = []
    vessel_speeds = []

    for v in range(len(vessels)):
        for t in range(0, len(vessels[v].log["Message"])):
            if "node" in vessels[v].log["Message"][t] and "stop" in vessels[
                    v].log["Message"][t]:
                vessel_speed.append(
                    calculate_distance([
                        vessels[v].log['Geometry'][t].x,
                        vessels[v].log['Geometry'][t].y
                    ], [
                        vessels[v].log['Geometry'][t - 1].x,
                        vessels[v].log['Geometry'][t - 1].y
                    ]) / (vessels[v].log['Timestamp'][t].timestamp() -
                          vessels[v].log['Timestamp'][t - 1].timestamp()))

            if vessels[v].log["Message"][t] == "Passing lock start":
                lock_cycle_start[v] = vessels[v].log["Timestamp"][t].timestamp(
                ) - simulation_start.timestamp()
                lock_cycle_duration[v] = vessels[v].log["Value"][t + 1]

            if vessels[v].log["Message"][t] == "Waiting in line-up area start":
                waiting_in_lineup_start[v] = vessels[v].log["Timestamp"][
                    t].timestamp() - simulation_start.timestamp()
                waiting_in_lineup_duration[v] = vessels[v].log["Value"][t + 1]

            if vessels[v].log["Message"][t] == "Waiting in waiting area start":
                waiting_in_waiting_start[v] = vessels[v].log["Timestamp"][
                    t].timestamp() - simulation_start.timestamp()
                waiting_in_waiting_duration[v] = vessels[v].log["Value"][t + 1]
        vessel_speeds.append(vessel_speed)

    ## TESTS
    # start times
    np.testing.assert_almost_equal(
        lock_1.doors_open + lock_1.doors_close +
        2 * lock_1.lock_width * lock_1.lock_length * wlev_dif[1][0] /
        (lock_1.disch_coeff * lock_1.opening_area *
         math.sqrt(2 * lock_1.grav_acc * lock_1.opening_depth)),
        lock_cycle_duration,
        decimal=0,
        err_msg='',
        verbose=True)

    np.testing.assert_almost_equal(lock_cycle_duration,
                                   waiting_in_lineup_duration,
                                   decimal=0,
                                   err_msg='',
                                   verbose=True)

    np.testing.assert_almost_equal(0,
                                   waiting_in_waiting_duration,
                                   decimal=0,
                                   err_msg='',
                                   verbose=True)

    # durations
    np.testing.assert_almost_equal(
        lock_1.doors_open + lock_1.doors_close +
        2 * lock_1.lock_width * lock_1.lock_length * wlev_dif[1][0] /
        (lock_1.disch_coeff * lock_1.opening_area *
         math.sqrt(2 * lock_1.grav_acc * lock_1.opening_depth)) +
        distances[0] / vessel_speeds[v][0] +
        distances[1] / vessel_speeds[v][1] +
        (distances[2] - 0.5 * vessels[v].L) / vessel_speeds[v][1] +
        (0.5 * vessels[v].L + distances[3]) / vessel_speeds[v][3] +
        ((distances[4] + distances[5]) - 0.5 * vessels[v].L) /
        vessel_speeds[v][4],
        lock_cycle_start,
        decimal=0,
        err_msg='',
        verbose=True)

    np.testing.assert_almost_equal(
        distances[0] / vessel_speeds[v][0] +
        distances[1] / vessel_speeds[v][1] +
        (distances[2] - 0.5 * vessels[v].L) / vessel_speeds[v][1],
        waiting_in_lineup_start,
        decimal=0,
        err_msg='',
        verbose=True)

    np.testing.assert_almost_equal(0,
                                   waiting_in_waiting_start,
                                   decimal=0,
                                   err_msg='',
                                   verbose=True)
Example #11
def create_geometry_from_hdrtime(s1, gpsfile, ship_conf, utmzone):
    grs80 = pyproj.Geod(ellps='GRS80')  # GRS80 ellipsoid
    coord = []

    sx, sy, gx, gy = 0, 0, 0, 0

    sx0 = ship_conf.gps_to_source_right
    sy0 = -1 * ship_conf.gps_to_source_stern
    gx0 = ship_conf.gps_to_receiver_right
    gy0 = -1 * ship_conf.gps_to_receiver_stern

    gpsdata = pd.DataFrame()
    gpsdata = pd.read_csv(gpsfile,sep=",",names=['date_time','lat','lon'],\
                                       parse_dates=True, header=None)

    gpsdata['date_time'] = pd.to_datetime(gpsdata['date_time'])
    gpsdata.set_index(gpsdata['date_time'], drop=True, inplace=True)
    gpsdata = gpsdata.drop_duplicates(['date_time'])
    gpsdata = gpsdata.sort_index()

    for i in range(len(s1)):
        coordination = []
        append = coordination.append
        segy = _read_segy_segy(s1[i].tape)
        for j in range(len(segy.traces)):
            #            tr = segy.traces[j]
            hdr = segy.traces[j].header
            #            if hdr.trace_number_within_the_original_field_record == 1:
            strf = " ".join([
                str(hdr.year_data_recorded),
                str(hdr.day_of_year),
                str(hdr.hour_of_day),
                str(hdr.minute_of_hour),
                str(hdr.second_of_minute)
            ])
            timing = datetime.strptime(strf, '%y %j %H %M %S')
            #            sys.stderr.write(strf)
            #print(timing)
            #            hms = datetime.strftime(timing,'%H%M%S')
            utm_x, utm_y = findxy_from_time(gpsdata, timing, True, utmzone)
            ######
            if i == 0:
                x0 = utm_x
                y0 = utm_y

            x1 = utm_x
            y1 = utm_y

            lon0, lat0 = geotools.utm2gmt(x0, y0, utmzone)
            lon1, lat1 = geotools.utm2gmt(x1, y1, utmzone)
            azimuth, bkw_azimuth, distance = grs80.inv(lon0, lat0, lon1, lat1)
            deg = -1 * azimuth
            rot_sx0, rot_sy0 = geotools.rot_xy(sx0, sy0, deg)
            rot_gx0, rot_gy0 = geotools.rot_xy(gx0, gy0, deg)
            sx = x0 + rot_sx0
            sy = y0 + rot_sy0
            gx = x0 + rot_gx0
            gy = y0 + rot_gy0
            coord = [sx, sy, gx, gy, azimuth]
            append(coord)
            x0, y0 = x1, y1

    return coordination
Example #12
    def calculate_distance(orig, dest):
        wgs84 = pyproj.Geod(ellps='WGS84')

        distance = wgs84.inv(orig[0], orig[1], dest[0], dest[1])[2]
        return distance
Example #13
import datetime
import math
from typing import List, Optional, Tuple, Dict

import pyproj
from flask.json import jsonify

MAX_DISTANCE = 100_000  # maximum allowable error, in meters, for a projection to be considered
MAX_NUM_RESULTS = 10

# dict of pyproj.Proj objects is kept global for reuse by repeated invocations of cloud functions
global_projs: Optional[Dict[str, pyproj.Proj]] = {}
global_proj_wgs84 = pyproj.Proj('EPSG:4326')
global_geod_wgs84 = pyproj.Geod(ellps='WGS84')


def setup_projs(epsg_only: bool = True) -> Dict[str, pyproj.Proj]:
    """Initialize a list of pyproj.Proj objects, each of which handles one projection."""

    # By default only consider EPSG projections, ignoring ESRI and IGNF
    authorities = ['EPSG'] if epsg_only else pyproj.database.get_authorities()

    # Setting the environment variable PYPROJ_GLOBAL_CONTEXT=ON will make instantiation *much* faster, see
    # https://github.com/pyproj4/pyproj/issues/661#issuecomment-653277033 for details.
    projs = {}
    for authority in authorities:
        for code in pyproj.database.get_codes(
                authority,
                pyproj.enums.PJType.PROJECTED_CRS,
                allow_deprecated=True):
            try:
Example #14
storm_stop = storm_start + dt.timedelta(minutes=3)
print(f'storm start: {storm_start}')
time_hit = np.logical_and(date_times > storm_start, date_times < storm_stop)
storm_lats = lats[time_hit]
storm_lons = lons[time_hit]
storm_prof_times = prof_times[time_hit]
storm_zvals = zvals[time_hit, :]
storm_height = radar_height[time_hit, :]
storm_date_times = date_times[time_hit]
len(date_times)

# # convert time to distance by using pyproj to get the greatcircle distance between shots


great_circle = pyproj.Geod(ellps='WGS84')
distance = [0]
start = (storm_lons[0], storm_lats[0])
for index in np.arange(1, len(storm_lons)):
    azi12, azi21, step = great_circle.inv(storm_lons[index - 1],
                                          storm_lats[index - 1],
                                          storm_lons[index], storm_lats[index])
    distance.append(distance[index - 1] + step)
distance = np.array(distance) / meters2km

# # Make the plot assuming that height is the same for every shot
#
# i.e. assume that height[0,:] = height[1,:] = ...
#
# in reality, the bin heights depend on the details of the radar returns, so
# we would need to histogram the heights into a uniform set of bins -- ignore that for this qualitative picture
Example #15
def distance(lon0, lat0, lon1, lat1):
    wgs84 = pyproj.Geod(ellps='WGS84')
    azimuth, azimuth_inv, dist = wgs84.inv(lon0, lat0, lon1, lat1)
    return dist * 0.001
Example #16
from .calibration import PhysicalLimitsOnly

WGS84_globe = pyproj.Proj(proj='latlong', ellps='WGS84')

def Disk(x, y, radius):
    return Point(x, y).buffer(radius)

# Convenience wrappers for forward and inverse geodetic computations
# on the WGS84 ellipsoid, smoothing over some warts in pyproj.Geod.
# inv() and fwd() take coordinates in lon/lat order and distances in
# meters, whereas the rest of this program uses lat/lon order and
# distances in kilometers.  They are vectorized internally, but do not
# support numpy-style broadcasting.  The prebound _Fwd, _Inv, and
# _Bcast are strictly performance hacks.
_WGS84geod = pyproj.Geod(ellps='WGS84')
def WGS84dist(lat1, lon1, lat2, lon2, *,
              _Inv = _WGS84geod.inv, _Bcast = np.broadcast_arrays):
    _, _, dist = _Inv(*_Bcast(lon1, lat1, lon2, lat2))
    return dist/1000
def WGS84loc(lat, lon, az, dist, *,
             _Fwd = _WGS84geod.fwd, _Bcast = np.broadcast_arrays):
    tlon, tlat, _ = _Fwd(*_Bcast(lon, lat, az, dist*1000))
    return tlat, tlon
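
A quick illustrative check of the wrappers (the coordinates are arbitrary, roughly Oslo and Paris):

print(WGS84dist(59.91, 10.75, 48.86, 2.35))  # geodesic distance, ~1340 (km)
print(WGS84loc(48.86, 2.35, 0.0, 100.0))     # (lat, lon) about 100 km due north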

# half of the equatorial circumference of the Earth, in meters
# it is impossible for the target to be farther away than this
DISTANCE_LIMIT = 20037508

# PhysicalLimitsOnly instances are data-independent, so we only need two
PHYSICAL_BOUNDS  = PhysicalLimitsOnly('physical')
Example #17
def minute_track_from_1sec_track():
    """
        Function to read quality-checked one-second GPS track data and derive the ship's velocity. Some basic filtering is applied to remove faulty data.

        :returns: data frame containing the ship's track and velocity at 1 minute resolution for legs 1 to 4
    """
    # velocities are calculated based on 1sec time series and then vector averaged to 1min

    #gps_csv_file_name = 'ace_cruise_track_1sec_2017-02.csv'
    #df_gps = pd.read_csv(gps_csv_file_folder+gps_csv_file_name)
    gps_csv_file_folder = './data/qualitychecked_onesec_10/'

    gps_csv_file_name0 = 'ace_cruise_track_1sec_2016-12.csv'
    gps_csv_file_name1 = 'ace_cruise_track_1sec_2017-01.csv'
    gps_csv_file_name2 = 'ace_cruise_track_1sec_2017-02.csv'
    gps_csv_file_name3 = 'ace_cruise_track_1sec_2017-03.csv'
    gps_csv_file_name4 = 'ace_cruise_track_1sec_2017-04.csv'
    print("reading GPS files ...")

    if 1:
        df_gps0 = pd.read_csv(gps_csv_file_folder + gps_csv_file_name0,
                              usecols=[
                                  'date_time', 'latitude', 'longitude',
                                  'fix_quality', 'device_id'
                              ])
        df_gps1 = pd.read_csv(gps_csv_file_folder + gps_csv_file_name1,
                              usecols=[
                                  'date_time', 'latitude', 'longitude',
                                  'fix_quality', 'device_id'
                              ])
        df_gps2 = pd.read_csv(gps_csv_file_folder + gps_csv_file_name2,
                              usecols=[
                                  'date_time', 'latitude', 'longitude',
                                  'fix_quality', 'device_id'
                              ])
        df_gps3 = pd.read_csv(gps_csv_file_folder + gps_csv_file_name3,
                              usecols=[
                                  'date_time', 'latitude', 'longitude',
                                  'fix_quality', 'device_id'
                              ])
        df_gps4 = pd.read_csv(gps_csv_file_folder + gps_csv_file_name4,
                              usecols=[
                                  'date_time', 'latitude', 'longitude',
                                  'fix_quality', 'device_id'
                              ])

        df_gps = [df_gps0, df_gps1, df_gps2, df_gps3,
                  df_gps4]  # concatenate the 5 GPS data files
        df_gps = pd.concat(df_gps)
    else:
        df_gps = pd.read_csv(gps_csv_file_folder + gps_csv_file_name0)
    print("... done")
    #df_gps = pd.read_csv(gps_csv_file_folder+gps_csv_file_name1)
    #df_gps = df_gps.rename(index=str, columns={"date_time": "timest_"})
    df_gps = df_gps.set_index(
        pd.to_datetime(df_gps.date_time,
                       format="%Y-%m-%d %H:%M:%S"))  # assing the time stamp
    df_gps.drop(columns=['date_time'], inplace=True)

    SOG = np.ones_like(df_gps.latitude) * np.nan
    COG = np.ones_like(df_gps.latitude) * np.nan
    velEast = np.ones_like(df_gps.latitude) * np.nan
    velNorth = np.ones_like(df_gps.latitude) * np.nan

    lon1 = np.array(df_gps.longitude[0:-1])
    lat1 = np.array(df_gps.latitude[0:-1])
    lon2 = np.array(df_gps.longitude[1:])
    lat2 = np.array(df_gps.latitude[1:])

    dt = (df_gps.index[1:] - df_gps.index[0:-1])
    dt = np.array(dt.total_seconds())

    MissingValues = np.where(np.isnan(lon1 * lon2 * lat1 * lat2))

    lon1[MissingValues] = 0
    lon2[MissingValues] = 0
    lat1[MissingValues] = 0
    lat2[MissingValues] = 0

    (az12, az21, dist) = pyproj.Geod(ellps=sphere_model_for_pyproj).inv(
        lon1, lat1, lon2, lat2)
    vel = np.true_divide(dist, dt)
    vel[MissingValues] = np.nan
    print(str(np.sum(vel > 12)) + ' samples with velocity larger than 12 m/s')
    vel[vel > 12] = np.nan  # cut unrealistic velocities (vel > 12 m/s)
    vel[dt > (
        1 * 5
    )] = np.nan  # set velocities with dt>5sec to NaN (don't trust velocities at the edge of data gaps)
    vel[np.abs(dt - np.round(dt)) >
        0.015] = np.nan  # these seem to be very noisy in position
    vel[(df_gps.fix_quality[1:].values
         ) == 6] = np.nan  # 6 = estimated (dead reckoning) (2.3 feature)
    vel[(df_gps.fix_quality[:-1].values) == 6] = np.nan
    vel[np.abs(np.diff(df_gps.device_id)) >
        0] = np.nan  # switching of the devices can cause wrong velocities

    evel = np.cos((-az12 + 90) * np.pi / 180) * vel
    nvel = np.sin((-az12 + 90) * np.pi / 180) * vel

    timest_vel = df_gps.index[1:] + pd.to_timedelta(
        dt * 0.5, unit='s')  # just to have a time stamp to plot vel against

    # note SOG, COG are recomputed from velEast velNorth below!
    SOG[1:-1] = np.nanmean([vel[0:-1], vel[1:]], axis=0)
    # calculate sog as avg of vel(jj-1,jj) and vel(jj,jj+1)
    SOG[0] = vel[0]
    SOG[-1] = vel[-1]
    # fill first and last reading

    COG[1:-1] = np.rad2deg((np.arctan2(
        np.nanmean([
            vel[0:-1] * np.sin(np.deg2rad(az12[0:-1])),
            vel[1:] * np.sin(np.deg2rad(az12[1:]))
        ],
                   axis=0),
        np.nanmean([
            vel[0:-1] * np.cos(np.deg2rad(az12[0:-1])),
            vel[1:] * np.cos(np.deg2rad(az12[1:]))
        ],
                   axis=0)) + 2 * np.pi) % (2 * np.pi))
    COG[0] = az12[0]
    COG[-1] = az12[-1]
    # fill first and last reading

    velEast[1:-1] = np.nanmean([evel[0:-1], evel[1:]], axis=0)
    velEast[0] = evel[0]
    velEast[-1] = evel[-1]
    velNorth[1:-1] = np.nanmean([nvel[0:-1], nvel[1:]], axis=0)
    velNorth[0] = nvel[0]
    velNorth[-1] = nvel[-1]

    df_gps = df_gps.assign(SOG=SOG)
    df_gps = df_gps.assign(COG=COG)
    df_gps = df_gps.assign(velEast=velEast)
    df_gps = df_gps.assign(velNorth=velNorth)

    lon_median = df_gps['longitude'].resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(
            merge_at_nseconds *
            .5))).median()  # interval median, used below to fix dateline issues

    nGPS = df_gps.SOG.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(merge_at_nseconds * 0.5))
    ).count(
    )  # counts per 1min average, put the interval center at the center of the 1min interval

    SOG = df_gps.SOG.copy()
    SOG_MAX = SOG.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(merge_at_nseconds * .5))).max()
    SOG_MIN = SOG.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(merge_at_nseconds * .5))).min()

    #df_gps=df_gps.resample('6S', loffset = datetime.timedelta(seconds=3) ).mean() # calculate 6 second mean to match wind data
    df_gps = df_gps.resample(
        str(merge_at_nseconds) + 'S',
        loffset=datetime.timedelta(seconds=(merge_at_nseconds * .5))
    ).mean(
    )  # calculate 1min mean to match wind data, put the interval center at the center of the 1min interval

    df_gps = df_gps.assign(SOG_DIFF=(SOG_MAX - SOG_MIN))

    # here we fix dateline issues of averaging longitudes around +/-180 degree
    # this is a bit coarse, but it works.
    lon_flip_tollerance = 0.0005  # a value well above the normal difference of mean and median longitudes
    df_gps['longitude'][np.abs(df_gps.longitude -
                               lon_median) > lon_flip_tollerance] = lon_median[
                                   np.abs(df_gps.longitude -
                                          lon_median) > lon_flip_tollerance]

    df_gps.COG = (90 - np.rad2deg(np.arctan2(df_gps.velNorth, df_gps.velEast))
                  ) % 360  # recompute COG from averaged North/East velocities
    df_gps.SOG = np.sqrt(
        np.square(df_gps.velNorth) +
        np.square(df_gps.velEast))  # vector average velocity

    if 1:
        print('nGPS removing ' + str(np.sum((nGPS > 0) & (nGPS < 10))) +
              ' of ' + str(len(df_gps)))
        # REMOVE 1min averages with less than 20 readings (out of 60) # ~0.24%
        # REMOVE 1min averages with less than 10 readings (out of 60) # ~0.026%

        for val in df_gps.columns:
            df_gps.at[nGPS.values < 10, val] = np.nan
            #df_gps = df_gps.assign( nGPS = nGPS)

    df_gps.drop(columns=['fix_quality', 'device_id'], inplace=True)

    return df_gps
Example #18
def bbox_decompose(
    bbox: Tuple[float, float, float, float],
    resolution: float,
    box_crs: str = DEF_CRS,
    max_px: int = 8000000,
) -> List[Tuple[Tuple[float, float, float, float], str, int, int]]:
    """Split the bounding box vertically for WMS requests.

    Parameters
    ----------
    bbox : tuple
        A bounding box; (west, south, east, north)
    resolution : float
        The target resolution for a WMS request in meters.
    box_crs : str, optional
        The spatial reference of the input bbox, default to EPSG:4326.
    max_px : int, optional
        The maximum allowable number of pixels (width x height) for a WMS requests,
        defaults to 8 million based on some trial-and-error.

    Returns
    -------
    list of tuple
        Each element contains a bounding box, its "i_j" label, and the width and
        height (in pixels) of the corresponding WMS request.
    """
    check_bbox(bbox)
    _bbox = MatchCRS.bounds(bbox, box_crs, DEF_CRS)
    width, height = bbox_resolution(_bbox, resolution, DEF_CRS)

    n_px = width * height
    if n_px < max_px:
        return [(bbox, "0_0", width, height)]

    geod = pyproj.Geod(ellps="WGS84")
    west, south, east, north = _bbox

    def directional_split(az: float, origin: float, dest: float, lvl: float, xy: bool, px: int):
        divs = [0]
        mul = 1.0
        coords = []

        def get_args(dst, dx):
            return (dst, lvl, az, dx, 0) if xy else (lvl, dst, az, dx, 1)

        while divs[-1] < 1:
            dim = int(np.sqrt(max_px) * mul)
            step = (dim - 1) * resolution

            _dest = origin
            while _dest < dest:
                args = get_args(_dest, step)
                coords.append((_dest, geod.fwd(*args[:-1])[args[-1]]))

                args = get_args(coords[-1][-1], resolution)
                _dest = geod.fwd(*args[:-1])[args[-1]]

            coords[-1] = (coords[-1][0], dest)

            nd = len(coords)
            divs = [dim for _ in range(nd)]
            divs[-1] = px - (nd - 1) * dim
            mul -= 0.1
        return coords, divs

    az_x = geod.inv(west, south, east, south)[0]
    lons, widths = directional_split(az_x, west, east, south, True, width)

    az_y = geod.inv(west, south, west, north)[0]
    lats, heights = directional_split(az_y, south, north, west, False, height)

    bboxs = []
    for i, ((bottom, top), h) in enumerate(zip(lats, heights)):
        for j, ((left, right), w) in enumerate(zip(lons, widths)):
            bx_crs = MatchCRS.bounds((left, bottom, right, top), DEF_CRS, box_crs)
            bboxs.append((bx_crs, f"{i}_{j}", w, h))
    return bboxs
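
Hypothetical usage, assuming the module-level helpers referenced above (check_bbox, MatchCRS, bbox_resolution, DEF_CRS) resolve as in the source package:

tiles = bbox_decompose((-75.5, 40.0, -74.5, 41.0), resolution=30.0)
for box, label, width, height in tiles:
    print(label, width, height, box)  # tile label "i_j" plus pixel dimensions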
Example #19
    def __init__(self,
                 obslon,
                 obslat,
                 obstime,
                 obsfile=None,
                 wind_east=0,
                 wind_north=0,
                 windreader=None,
                 wind_factor=0.018,
                 name=None):
        """	Reader which statistically extrapolate current forcing.
	 	It uses the residual track obtained by subtracting the wind forcing component
	 	from the past observed motion of a particle.
		"""
        if name is not None:
            self.name = name
        else:
            self.name = 'reader_current_from_observation'

        # Cover whole earth, no validity radius yet
        self.proj4 = '+proj=latlong'
        self.xmin = -180
        self.xmax = 180
        self.ymin = -90
        self.ymax = 90

        self.z = np.ma.array([0.], mask=[False], fill_value=1e+20)

        self.start_time = obstime[1] + timedelta(hours=1)
        self.end_time = self.start_time + timedelta(days=10)
        self.times = np.arange(self.start_time, self.end_time,
                               timedelta(hours=1)).astype(datetime)
        self.time_step = self.times[-1] - self.times[-2]

        time_delta_seconds = (obstime[1] - obstime[0]).total_seconds()

        if windreader is not None:
            x0, y0 = windreader.lonlat2xy(obslon[0], obslat[0])
            x1, y1 = windreader.lonlat2xy(obslon[1], obslat[1])

            #NB! Use wind speeds at observed positions
            wind0 = windreader.get_variables(['x_wind', 'y_wind'],
                                             x=x0,
                                             y=y0,
                                             time=obstime[0])
            wind1 = windreader.get_variables(['x_wind', 'y_wind'],
                                             x=x1,
                                             y=y1,
                                             time=obstime[1])
            meanwind_x = (wind0['x_wind'][0] + wind1['x_wind'][0]) / 2
            meanwind_y = (wind0['y_wind'][0] + wind1['y_wind'][0]) / 2

        else:
            meanwind_x = wind_east
            meanwind_y = wind_north  # wind_east/north should be given in m/s

        g = pyproj.Geod(ellps='WGS84')
        self.azimuth, backazimuth, self.dist = \
         g.inv(obslon[0], obslat[0], obslon[1], obslat[1], radians=False)

        self.speed = self.dist / (time_delta_seconds)

        #Calculate the average speed in x/y-direction of the residual
        self.x_sea_water_velocity = self.speed * np.sin(
            np.radians(self.azimuth)) - meanwind_x * wind_factor
        self.y_sea_water_velocity = self.speed * np.cos(
            np.radians(self.azimuth)) - meanwind_y * wind_factor

        super(Reader, self).__init__()
Example #20
def distance(lat1, lon1, lat2, lon2):
    """Return the geodesic distance between two points, in meters."""
    geod = pyproj.Geod(ellps="WGS84")
    # Geod.inv returns (forward azimuth, back azimuth, distance)
    fwd_azimuth, back_azimuth, distance = geod.inv(lon1, lat1, lon2, lat2)
    return distance
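
# Quick check with hypothetical coordinates: Paris (48.85N, 2.35E) to
# Berlin (52.52N, 13.41E) should come out near 878 km.
print(distance(48.85, 2.35, 52.52, 13.41))  # ~878000.0 (meters)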
Example #21
    def test_nearest_center(self):
        geod = pyproj.Geod(ellps='sphere')
        mg = sg.gridio.read_mitgridfile('./data/tile005.mitgrid', 270, 90)
        i, j, dist = sg.util.nearest(-83., -24.310, mg['XG'], mg['YG'], geod)
        self.assertEqual((i, j), (135, 45))
        self.assertAlmostEqual(dist, 6.2719790)
Example #22
def lalo_ground2iono(lat,
                     lon,
                     inc_angle,
                     az_angle=None,
                     head_angle=None,
                     iono_height=450e3,
                     method='spherical_distance'):
    """Calculate lat/lon of IPP with the given lat/lon on the ground and LOS geometry

    Equation (12) in Yunjun et al. (2021, TGRS).

    Parameters: lat/lon      - float, latitude/longitude of the point on the ground in degrees
                inc_angle    - float/np.ndarray, incidence angle of the line-of-sight vector on the ground in degrees
                az_angle     - float/np.ndarray, azimuth   angle of the line-of-sight vector
                head_angle   - float, heading angle of the satellite orbit in degrees
                               from the north direction with positive in clockwise direction
                iono_height  - float, height of the ionosphere thin-shell in meters
    Returns:    lat/lon_iono - float, latitude/longitude of the point on the thin-shell ionosphere in degrees
    """
    # LOS incidence angle at the ionospheric level
    inc_angle_iono = incidence_angle_ground2iono(inc_angle, iono_height=iono_height)

    # geocentric angular distance between the SAR pixel and the IPP
    dist_angle = inc_angle - inc_angle_iono

    # LOS azimuth angle (from the ground to the satellite)
    # measured from the north with anti-clockwise as positive (ISCE-2 convention).
    if az_angle is None:
        # heading angle -> azimuth angle
        if head_angle is not None:
            az_angle = ut.heading2azimuth_angle(head_angle)
    if az_angle is None:
        raise ValueError('az_angle and head_angle cannot both be None!')

    # option 1 - spherical_distance
    # link:
    #   https://gis.stackexchange.com/questions/5821 [there is a typo there]
    #   http://www.movable-type.co.uk/scripts/latlong.html [used]
    if method == 'spherical_distance':
        # deg -> rad & copy to avoid changing the input variables in place
        lat = np.array(lat) * np.pi / 180.
        lon = np.array(lon) * np.pi / 180.
        dist_angle = dist_angle * np.pi / 180.
        az_angle = np.array(az_angle) * np.pi / 180.

        # calculate
        lat_iono = np.arcsin(
            np.sin(lat) * np.cos(dist_angle) +
            np.cos(lat) * np.sin(dist_angle) * np.cos(az_angle))
        dlon = np.arctan2(
            np.sin(dist_angle) * np.cos(lat) * np.sin(az_angle) * -1.,
            np.cos(dist_angle) - np.sin(lat) * np.sin(lat_iono))
        lon_iono = np.mod(lon + dlon + np.pi, 2. * np.pi) - np.pi

        # rad -> deg
        lat_iono *= 180. / np.pi
        lon_iono *= 180. / np.pi

    # option 2 - pyproj
    # link: https://pyproj4.github.io/pyproj/stable/api/geod.html#pyproj.Geod.fwd
    elif method == 'pyproj':
        import pyproj
        geod = pyproj.Geod(ellps='WGS84')
        dist = dist_angle * (np.pi / 180.) * EARTH_RADIUS
        lon_iono, lat_iono = geod.fwd(lon, lat, az=-az_angle, dist=dist)[:2]

    else:
        raise ValueError(f'Unrecognized method: {method}')

    ## obsolete
    ## offset angle from equation (25) in Chen and Zebker (2012)
    ## ~3 degree in magnitude for Sentinel-1 asc track
    #off_iono = inc_angle - np.arcsin(EARTH_RADIUS / (EARTH_RADIUS + iono_height) * np.sin(np.pi - inc_angle))
    #lat += off_iono * np.cos(head_angle) * 180. / np.pi
    #lon += off_iono * np.sin(head_angle) * 180. / np.pi

    return lat_iono, lon_iono
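
# A minimal cross-check of the two methods above (hypothetical values,
# assuming EARTH_RADIUS = 6371e3 m): reckon 2 degrees of arc from
# (lat=30, lon=100) along a clockwise-from-north bearing of 260 degrees.
import numpy as np
import pyproj

EARTH_RADIUS = 6371e3
lat0, lon0, bearing, dist_angle = 30.0, 100.0, 260.0, 2.0

# spherical destination-point formula (www.movable-type.co.uk)
lat_r, brg_r, d_r = map(np.radians, (lat0, bearing, dist_angle))
lat1 = np.arcsin(np.sin(lat_r) * np.cos(d_r) +
                 np.cos(lat_r) * np.sin(d_r) * np.cos(brg_r))
dlon = np.arctan2(np.sin(brg_r) * np.sin(d_r) * np.cos(lat_r),
                  np.cos(d_r) - np.sin(lat_r) * np.sin(lat1))
print(np.degrees(lat1), lon0 + np.degrees(dlon))

# ellipsoidal equivalent; differs slightly from the spherical result
geod = pyproj.Geod(ellps='WGS84')
lon2, lat2, _ = geod.fwd(lon0, lat0, az=bearing, dist=d_r * EARTH_RADIUS)
print(lat2, lon2)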
Example #23
from collections import namedtuple
from itertools import tee
from typing import List

import overpy
import pyproj
from geojson import Feature, FeatureCollection
from numpy import cos, pi
from shapely.geometry import LineString

PolarCoord = namedtuple('PolarCoord', 'bearing distance')
GEODESIC = pyproj.Geod(ellps='WGS84')


def convert_cartesian_coords_to_polar_coords(start_coord, end_coord,
                                             geodesic: pyproj.Geod = GEODESIC):
    fwd_bearing, _, distance = geodesic.inv(*start_coord, *end_coord)
    return PolarCoord(fwd_bearing % 360, distance)
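
# Example call (hypothetical coordinates): the function takes (lon, lat)
# pairs because Geod.inv expects longitude first. A 1-degree eastward hop
# at 48N gives a bearing just under 90 and a distance of roughly 74-75 km.
start, end = (2.0, 48.0), (3.0, 48.0)
print(convert_cartesian_coords_to_polar_coords(start, end))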
Example #24

def calc_distance(lat1, long1, lat2, long2):
    """Return the geodesic distance between two points, in meters."""
    geod = pyproj.Geod(ellps="WGS84")
    # Geod.inv returns (forward azimuth, back azimuth, distance)
    heading1, heading2, distance = geod.inv(long1, lat1, long2, lat2)
    return distance
Example #25
# -*- coding: utf-8 -*-
"""
Created on Mon Dec  2 14:32:15 2019

@author: Ahmed.Dakrory
"""
import math
import utm

import pyproj
import struct
import time

geodesic = pyproj.Geod(ellps='WGS84')


def convertNumberIntoAsciValue(valueToConvert):
    # Convert each character of the number's decimal representation to its
    # ASCII code, e.g. 12.5 -> [49, 50, 46, 53].
    latLongAsciBytes = []
    for ch in str(valueToConvert):
        latLongAsciBytes.append(ord(ch))
    return latLongAsciBytes


def ConvertToBytes(data):
    # Pack an integer as a big-endian unsigned short, then split it into its
    # high and low bytes.
    s = struct.pack('>H', data)
    firstByte, secondByte = struct.unpack('>BB', s)
    return [firstByte, secondByte]
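
# Round-trip examples: 1025 is 0x0401 big-endian, so it splits into [4, 1];
# the string '12.5' maps to the ASCII codes of its characters.
print(ConvertToBytes(1025))              # [4, 1]
print(convertNumberIntoAsciValue(12.5))  # [49, 50, 46, 53]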

Example #26
File: stats.py Project: ivn888/karta
def estimate_vario(mp, npoints=2000, max_dist=None, interval=None):
    """ Given a Multipoint input, estimate a spatial variogram. By default, a
    variogram is quickly estimated. If npoints is None, then the full variogram
    is calculated.

    Parameters
    ----------
    mp : Multipoint instance
    npoints : int, optional
        number of values used for the variogram approximation. If npoints is
        None, the full variogram is calculated.
    max_dist : float, optional
        the maximum lag
    interval : float, optional
        the lag interval

    Returns
    -------
    ndarray
        lags
    ndarray
        variogram
    """
    if len(mp.data) != len(mp.vertices):
        raise Exception('estimate_vario() requires a Multipoint with '
                        'scalar data')
    if npoints is None or npoints > len(mp):
        npoints = len(mp)

    irand = random.sample(range(len(mp)), npoints)
    verts = mp.get_vertices()[irand]
    z = np.array([mp.data[i] for i in irand])

    if isinstance(mp.crs, GeographicalCRS):
        import warnings
        import pyproj
        warnings.warn(
            "For improved performance, consider projecting data to an "
            "approximately equidistant reference system first.")
        geod = pyproj.Geod(ellps="WGS84")

        def distfunc(a, b):
            return geod.inv(a[0], a[1], b[0], b[1])[2]
    else:
        distfunc = "euclidean"

    dist = pdist(verts, distfunc)
    I, J = ppairs(z)
    diff = z[I] - z[J]

    if max_dist is None:
        max_dist = dist.max()
    else:
        max_dist = float(max_dist)

    if interval is None:
        interval = max_dist / 10.0
    else:
        interval = float(interval)

    lags = np.arange(0, max_dist, interval)
    sigma_variance = np.empty_like(lags)
    for i, lag in enumerate(lags):
        band = (lag <= dist) * (dist < lag + interval)
        sigma_variance[i] = 0.5 * np.nanstd(diff[band])**2
    return lags, sigma_variance
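
# A self-contained sketch of the same binning logic on synthetic planar data
# (numpy/scipy only). Replacing ppairs with numpy.triu_indices is an
# assumption about its role: pdist and triu_indices(k=1) enumerate pairs in
# the same order.
import numpy as np
from scipy.spatial.distance import pdist

rng = np.random.default_rng(0)
verts = rng.uniform(0, 100, size=(200, 2))
z = np.sin(verts[:, 0] / 20.0) + 0.1 * rng.standard_normal(200)

dist = pdist(verts)                  # pairwise Euclidean distances
I, J = np.triu_indices(len(z), k=1)  # matching pair indices
diff = z[I] - z[J]

max_dist = dist.max()
interval = max_dist / 10.0
lags = np.arange(0, max_dist, interval)
semivariance = np.empty_like(lags)
for i, lag in enumerate(lags):
    band = (lag <= dist) & (dist < lag + interval)
    semivariance[i] = 0.5 * np.nanstd(diff[band]) ** 2
print(lags, semivariance)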
Example #27
# coord_analysis.py

import pyproj

geod = pyproj.Geod(ellps="WGS84")


def get_coord(prompt):
    while True:
        s = input(prompt + " (lat,long): ")
        if "," not in s:
            continue
        s1, s2 = s.split(",", 1)
        try:
            latitude = float(s1.strip())
        except ValueError:
            continue
        try:
            longitude = float(s2.strip())
        except ValueError:
            continue
        return latitude, longitude


def get_num(prompt):
    while True:
        s = input(prompt + ": ")
        try:
            value = float(s)
        except ValueError:
            continue
        return value
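
# A plausible way these helpers combine (hypothetical; the rest of the
# original script is not shown): read two points and report the geodesic
# distance between them using the module-level geod.
if __name__ == '__main__':
    lat1, lon1 = get_coord("Start point")
    lat2, lon2 = get_coord("End point")
    _, _, dist = geod.inv(lon1, lat1, lon2, lat2)
    print("Distance: %.1f m" % dist)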
Example #28
def lon_lat(lon0, lat0, azimuth, dist):
    """Reckon the point a given distance along an azimuth.

    dist is in kilometers; Geod.fwd expects meters.
    """
    wgs84 = pyproj.Geod(ellps='WGS84')
    lon, lat, back_azimuth = wgs84.fwd(lon0, lat0, azimuth, dist * 1000.0)
    return lon, lat
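
# Example (hypothetical values): reckon 100 km due east of (lon=10, lat=50);
# the longitude grows by about 1.4 degrees and the latitude barely changes.
print(lon_lat(10.0, 50.0, 90.0, 100.0))  # approx (11.40, 49.99)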
Example #29
from numpy import arange, c_, cos, deg2rad, genfromtxt, ones, savetxt, sin, zeros
import pyproj

# (fragment: in_file, dip, Dx, Dy, co_strike and out_rupt_file are assumed to
# be defined earlier in the original script)
strike = 300
rake = 100

s = genfromtxt(in_file)
lon = s[:, 1]
lat = s[:, 0]

#Get slip
ds = s[:, 5] * sin(deg2rad(rake))
ss = s[:, 5] * cos(deg2rad(rake))

#Get new coords
z = s[:, 4] + (Dy / 2) * sin(deg2rad(dip))

#Reckon with pyproj
g = pyproj.Geod(ellps='WGS84')
#Now reckon
lo = zeros(len(s))
la = zeros(len(s))
for k in range(len(s)):
    lo[k], la[k], ba = g.fwd(lon[k], lat[k], co_strike, Dx * 1000 / 2)

#Write .rupt
i = ones(len(s))
out = c_[arange(len(s)) + 1, lo, la, z, i * strike, i * dip, 0.5 * i, i, ss,
         ds, Dx * i * 1000, Dy * i * 1000, i]
savetxt(
    out_rupt_file,
    out,
    fmt='%i\t%10.6f\t%10.6f\t%10.6f\t%7.2f\t%7.2f\t%7.4f\t%7.4f\t%7.4f\t%7.4f'
        '\t%9.4f\t%9.4f\t%7.4f')
Example #30
    def _extract_angle(self, dataset, cfg, src_meta):
        """Extract data for vector direction from dataset.
        Angles are expected to be counter-clockwise from east in degrees.
        Angles must be recomputed to match the output projection."""

        # Get metadata
        u_channel = src_meta['u_channel']
        v_channel = src_meta['v_channel']
        uv_offset = src_meta['uv_offset']
        uv_scale = src_meta['uv_scale']
        uv_nodatavalue = src_meta['uv_nodatavalue']
        angle_channel = src_meta['angle_channel']
        angle_offset = src_meta['angle_offset']
        angle_scale = src_meta['angle_scale']

        # Get projection settings
        x_origin, pixel_col_width, pixel_row_width, y_origin, \
            pixel_col_height, pixel_row_height = dataset.GetGeoTransform()

        # Get u/v and projected x/y
        u_band = dataset.GetRasterBand(u_channel)
        v_band = dataset.GetRasterBand(v_channel)
        u_array = u_band.ReadAsArray()
        v_array = v_band.ReadAsArray()
        output_shape = u_array.shape
        x_size, y_size = output_shape
        if uv_nodatavalue is not None:
            valid = numpy.where(u_array != uv_nodatavalue)
            u = u_array[valid]
            v = v_array[valid]
            x1 = valid[1] * pixel_col_width + valid[0] * pixel_row_width
            y1 = valid[1] * pixel_col_height + valid[0] * pixel_row_height
        else:
            u = u_array
            v = v_array
            col = numpy.arange(y_size)
            row = numpy.arange(x_size)
            x1 = numpy.tile(col * pixel_col_width, x_size) + \
                numpy.repeat(row * pixel_row_width, y_size)
            y1 = numpy.tile(col * pixel_col_height, x_size) + \
                numpy.repeat(row * pixel_row_height, y_size)

        u = u * uv_scale + uv_offset
        v = v * uv_scale + uv_offset
        x1 = x_origin + x1
        y1 = y_origin + y1

        # Compute angle from u/v
        real_angle = numpy.degrees(numpy.arctan2(v, u))
        del u, v, u_array, v_array

        # Reverse projection to get (lon,lat) for each (x,y)
        output_proj = get_proj4(cfg['output_proj'], src_meta)
        proj = pyproj.Proj(output_proj)

        # Get lon/lat for each x/y
        lons1, lats1 = proj(x1, y1, inverse=True)

        # Use an arbitrary distance (1km)
        dists = numpy.ndarray(shape=lons1.shape)
        dists.fill(1000.0)

        # pyproj.Geod.fwd expects bearings to be clockwise angles from north
        # (in degrees).
        fixed_angles = 90.0 - real_angle

        # Interpolate a destination from grid point and data direction
        geod = pyproj.Geod(ellps='WGS84')
        lons2, lats2, bearings2 = geod.fwd(lons1, lats1, fixed_angles, dists)
        del bearings2, lons1, lats1, real_angle, fixed_angles, dists

        # Warp destination to output projection
        x2, y2 = proj(lons2, lats2)
        del lons2, lats2

        # Fix issue when an interpolated point is not reprojected in the same
        # longitude range as its origin (applies to cylindrical projections only)
        if cfg['output_proj_type'] == 'cylindric':
            extent = [float(x) for x in cfg['extent'].split(' ')]
            vport_bottom, vport_left, vport_top, vport_right = extent
            vport_x_extent = vport_right - vport_left
            x2 = numpy.mod(x2 - (x1 + vport_left), vport_x_extent) \
                + (x1 + vport_left)

        # Compute angle in the output projection, wrapped to [0, 360) degrees
        projected_angles = numpy.arctan2(y2 - y1, x2 - x1)
        ranged_angles = numpy.mod(360 + numpy.degrees(projected_angles), 360)
        del x1, y1, x2, y2

        # Rescale angle
        scaled_angles = (ranged_angles - angle_offset) / angle_scale
        scaled_angles = numpy.round(scaled_angles).astype('uint8')

        # Rebuild matrix from flattened data
        if uv_nodatavalue is not None:
            nodatavalue = src_meta['nodatavalues'][angle_channel - 1]
            angle = numpy.empty(output_shape, dtype='uint8')
            angle.fill(nodatavalue)
            angle[valid] = scaled_angles
            return angle
        else:
            scaled_angles.shape = output_shape
            return scaled_angles
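
# The core trick above, reduced to a standalone sketch (the Mercator output
# projection is a hypothetical stand-in for cfg['output_proj']): step a short
# geodesic distance along the data direction, project both endpoints, and
# measure the angle between them in map coordinates.
import numpy
import pyproj

proj = pyproj.Proj('+proj=merc +ellps=WGS84')
geod = pyproj.Geod(ellps='WGS84')

lon1, lat1 = 5.0, 60.0
u, v = 1.0, 1.0                                  # east/north vector components
real_angle = numpy.degrees(numpy.arctan2(v, u))  # CCW from east, in degrees

# Geod.fwd wants a clockwise-from-north bearing
lon2, lat2, _ = geod.fwd(lon1, lat1, 90.0 - real_angle, 1000.0)

x1, y1 = proj(lon1, lat1)
x2, y2 = proj(lon2, lat2)
projected_angle = numpy.degrees(numpy.arctan2(y2 - y1, x2 - x1))
print(projected_angle % 360)  # direction of the vector in the projection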