def test_equivalent_proj():
    transformer = Transformer.from_proj(
        "+init=epsg:4326", pyproj.Proj(4326).crs.to_proj4(), skip_equivalent=True
    )
    assert transformer._transformer.skip_equivalent
    assert transformer._transformer.projections_equivalent
    assert not transformer._transformer.projections_exact_same
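For context, skip_equivalent tells pyproj to bypass the transformation entirely when the two CRS are equivalent. A minimal sketch (note: skip_equivalent was deprecated in pyproj 3.0 and later removed, so this only runs on older releases):

from pyproj import Transformer

transformer = Transformer.from_proj(4326, 4326, skip_equivalent=True)
# equivalent source/target: coordinates should pass straight through unchanged
print(transformer.transform(45.0, 9.0))  # (45.0, 9.0)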

def test_4d_transform_crs_obs2():
    transformer = Transformer.from_proj(4896, 7930)
    assert_almost_equal(
        transformer.transform(
            xx=3496737.2679, yy=743254.4507, zz=5264462.9620, tt=2019.0
        ),
        (3496737.7857162016, 743254.0394113371, 5264462.643659916, 2019.0),
    )

def test_4d_transform_crs_obs1():
    transformer = Transformer.from_proj(7789, 8401)
    assert_almost_equal(
        transformer.transform(
            xx=3496737.2679, yy=743254.4507, zz=5264462.9620, tt=2019.0
        ),
        (3496737.757717311, 743253.9940103051, 5264462.701132784, 2019.0),
    )
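These 4D calls carry an observation epoch (tt, in decimal years) through time-dependent frame transformations such as ITRF2014 -> ETRF2014 (EPSG:7789 -> EPSG:8401). As a hedged sketch of the same pattern for many points, itransform accepts 4-tuples of (x, y, z, t):

from pyproj import Transformer

transformer = Transformer.from_proj(7789, 8401)
pts = [(3496737.2679, 743254.4507, 5264462.9620, 2019.0)]
for x, y, z, epoch in transformer.itransform(pts):
    print(x, y, z, epoch)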
Example #4
def test_equivalent_proj__different():
    transformer = Transformer.from_proj(3857, 4326, skip_equivalent=True)
    assert transformer._transformer.skip_equivalent
    assert not transformer._transformer.projections_equivalent
    assert not transformer._transformer.projections_exact_same
Example #5
def points_to_points(df,
                     time_name,
                     x_name,
                     y_name,
                     data_name,
                     point_data,
                     from_crs,
                     to_crs=None,
                     method='linear',
                     digits=2,
                     min_val=None):
    """
    Function to take a DataFrame of point value inputs (df) and interpolate them to other point locations (point_data). Uses the scipy griddata function for interpolation.

    Parameters
    ----------
    df : DataFrame
        A pandas DataFrame containing the four columns named by the parameters below.
    time_name : str
        The time column name in df.
    x_name : str
        The x column name in df.
    y_name : str
        The y column name in df.
    data_name : str
        The data column name in df.
    point_data : str or DataFrame
        Path to geometry file of points to be interpolated (e.g. shapefile). Can be any file type that fiona/gdal can open. It can also be a DataFrame with 'x' and 'y' columns and the crs must be the same as to_crs.
    from_crs : int or str
        The projection info for the input data (either a proj4 str or an epsg int).
    to_crs : int or str or None
        The projection for the output data, in the same formats as from_crs. If None, the output stays in the crs of point_data.
    method : str
        The scipy griddata interpolation method to be applied. Options are 'nearest', 'linear', and 'cubic'. See `<https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html>`_ for more details.
    digits : int
        The number of digits to round the output.
    min_val : int, float, or None
        The minimum value for the results. All results below min_val will be assigned min_val.

    Returns
    -------
    DataFrame
    """
    ### Read in points
    if isinstance(point_data, str):
        if _fiona:
            with fiona.open(point_data) as f1:
                point_crs = Proj(f1.crs)
                points = np.array([
                    p['geometry']['coordinates'] for p in f1
                    if p['geometry']['type'] == 'Point'
                ])
        else:
            raise ImportError('Please install fiona for importing GIS files')
    elif isinstance(point_data, pd.DataFrame):
        point_crs = Proj(CRS.from_user_input(to_crs))
        points = point_data[['y', 'x']].values
    else:
        raise ValueError(
            "point_data must be a str path to a geometry file (e.g. shapefile) or a DataFrame with 'x' and 'y' columns"
        )

    df2 = df.copy()

    time1 = pd.to_datetime(df2[time_name].sort_values().unique())

    ### Convert input data to crs of points shp and create input xy
    if to_crs is not None:
        to_crs1 = Proj(CRS.from_user_input(to_crs))
        trans1 = Transformer.from_proj(point_crs, to_crs1)
        points = np.array(trans1.transform(*points.T))
    else:
        to_crs1 = point_crs
    from_crs1 = Proj(CRS.from_user_input(from_crs))
    xy1 = df2[[y_name, x_name]].values
    trans2 = Transformer.from_proj(from_crs1, to_crs1)
    xy_new1 = np.array(trans2.transform(*xy1.T))
    df2[x_name] = xy_new1[1]
    df2[y_name] = xy_new1[0]

    ### Run interpolations
    points1 = np.array((points[1], points[0])).T
    new_lst = []
    for name, group in df2.groupby(time_name):
        print(name)
        xy = group[[x_name, y_name]].values
        new_z = griddata(xy,
                         group[data_name].values,
                         points1,
                         method=method,
                         fill_value=np.nan).round(digits)
        if isinstance(min_val, (int, float)):
            new_z[new_z < min_val] = min_val
        new_lst.extend(new_z.tolist())

    ### Create new df
    time_ar = np.repeat(time1, len(points1))
    x_ar = np.tile(points1.T[0], len(time1))
    y_ar = np.tile(points1.T[1], len(time1))
    new_df = pd.DataFrame({
        'time': time_ar,
        'x': x_ar,
        'y': y_ar,
        data_name: new_lst
    }).set_index(['time', 'x', 'y'])

    ### Export results
    return new_df
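A minimal, hypothetical usage sketch (the column names, station coordinates, and EPSG code 2193 below are invented for illustration; it assumes the imports used by this module):

import pandas as pd

df = pd.DataFrame({
    'time': pd.to_datetime(['2020-01-01'] * 3),
    'x': [1570000.0, 1590000.0, 1580000.0],
    'y': [5420000.0, 5420000.0, 5440000.0],
    'precip': [4.0, 6.0, 5.0],
})
# one target point, in the same crs as to_crs
pts = pd.DataFrame({'x': [1580000.0], 'y': [5426000.0]})

out = points_to_points(df, time_name='time', x_name='x', y_name='y',
                       data_name='precip', point_data=pts,
                       from_crs=2193, to_crs=2193)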
Example #6
def assert_all_points_are_valid(crs: CRS, mask: MultiPolygon):
    transformer = Transformer.from_proj(CRS.from_epsg(4326), crs)
    geom: Polygon
    for geom in mask.geoms:
        for point in geom.exterior.coords:
            assert transformer.transform(*point)[0] != float('inf')
Example #7
def test_transformer_proj__area_of_interest(aoi_data_directory):
    transformer = Transformer.from_proj(4326,
                                        2964,
                                        area_of_interest=AreaOfInterest(
                                            -136.46, 49.0, -60.72, 83.17))
    assert transformer.description == "Inverse of NAD27 to WGS 84 (13) + Alaska Albers"
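For reference, AreaOfInterest bounds the region PROJ considers when choosing a transformation pipeline; restricting it to Alaska is what selects the operation named in the assertion above. A hedged sketch of the constructor (argument order: west, south, east, north, in degrees):

from pyproj import Transformer
from pyproj.transformer import AreaOfInterest

aoi = AreaOfInterest(west_lon_degree=-136.46, south_lat_degree=49.0,
                     east_lon_degree=-60.72, north_lat_degree=83.17)
transformer = Transformer.from_proj(4326, 2964, area_of_interest=aoi)
print(transformer.description)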
Example #8
        fn = 0

        # for the 3 buckets
        tp_b_small, tp_b_medium, tp_b_large = 0, 0, 0
        fn_b_small, fn_b_medium, fn_b_large = 0, 0, 0
        fp_b_small, fp_b_medium, fp_b_large = 0, 0, 0

        # false negatives with height
        fn_height = []

        # For average precision
        avg_prec = []

        # transformers for distance calculation in epsg 3857
        transformer = Transformer.from_proj('epsg:3857',
                                            'epsg:4326',
                                            always_xy=True)
        transformer2 = Transformer.from_proj('epsg:4326',
                                             'epsg:3857',
                                             always_xy=True)

        distance_sum = 0
        distance_norm = 0

        distance_sum_b_small, distance_sum_b_medium, distance_sum_b_large = 0, 0, 0
        distance_norm_b_small, distance_norm_b_medium, distance_norm_b_large = 0, 0, 0

        for img_data in all_imgs:
            lon, lat = [float(x) for x in img_data['filepath'].split('_')[1:3]]
            reference_point_4326 = (lon, lat)
            reference_point_3857 = transformer2.transform(lon, lat)
Example #9
def test_transform__out_of_bounds():
    with pytest.warns(FutureWarning):
        transformer = Transformer.from_proj("+init=epsg:4326",
                                            "+init=epsg:27700")
    assert np.all(
        np.isinf(transformer.transform(100000, 100000, errcheck=True)))
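Without errcheck, out-of-range inputs do not raise; pyproj signals failure by returning inf. A small sketch of that default behaviour (assuming a recent pyproj, where plain EPSG strings replace the deprecated +init= form):

import numpy as np
from pyproj import Transformer

transformer = Transformer.from_proj("epsg:4326", "epsg:27700")
# (100000, 100000) is far outside valid lat/lon, so the transform fails
x, y = transformer.transform(100000, 100000)
print(np.isinf(x), np.isinf(y))  # True True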
Example #10
def test_equivalent_proj__disabled():
    transformer = Transformer.from_proj(3857, pyproj.Proj(3857).crs.to_proj4())
    assert not transformer._transformer.skip_equivalent
    assert not transformer._transformer.projections_equivalent
    assert not transformer._transformer.projections_exact_same
Example #11
for item in col_area.find():
    line = []
    line.append(item["엑스좌표_값"])
    line.append(item["와이좌표_값"])

    training_data.append(line)
    training_label.append(item["상권_코드_명"])

# Make KNN Model & fit
classifier = KNeighborsClassifier(n_neighbors=1)
classifier.fit(training_data, training_label)

# Define coordinate transform: WGS84 to EPSG:5181
transformer = Transformer.from_proj(
    "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs",
    "+proj=tmerc +lat_0=38 +lon_0=127 +k=1 +x_0=200000 +y_0=500000 +ellps=GRS80 +units=m +no_defs",
    always_xy=True,
    skip_equivalent=True)
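
# hedged sanity check (hypothetical Seoul coordinates): with always_xy=True
# the transformer takes lon, lat and returns projected x, y
_x, _y = transformer.transform(126.9780, 37.5665)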

# Start flask app
app = Flask(__name__)


# index page
@app.route('/')
def index():
    return render_template('gmap.html')


# Response coordinate
@app.route('/call', methods=['GET'])
Example #12
def project_geometry(geometry, source, target):
    """Projects a shapely geometry object from the source to the target projection."""

    transformer = Transformer.from_proj(source, target)

    return transform(transformer.transform, geometry)
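A hedged usage sketch (assumes shapely provides the transform helper this snippet imports; note that without always_xy the axis order follows each CRS definition, so EPSG:4326 expects lat, lon):

from shapely.geometry import Point

# hypothetical point: latitude first, matching EPSG:4326 axis order
projected = project_geometry(Point(40.75, -73.98), "epsg:4326", "epsg:3857")
print(projected.wkt)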
Example #13
def test_equivalent_proj__disabled():
    transformer = Transformer.from_proj(3857, pyproj.Proj(3857).crs.to_proj4())
    assert not transformer._transformer.skip_equivalent
    assert not transformer._transformer.projections_equivalent
    assert not transformer._transformer.projections_exact_same
Example #14
from typing import List

# do this as mapnik expects coordinates in epsg 3857 for some weird reason
import math
from pyproj import Proj, Transformer

wgs84 = Proj('epsg:4326')
mapnik_proj = Proj("epsg:3857")
transformer = Transformer.from_proj(wgs84, mapnik_proj)


class Vertex:
    def __init__(self, id, label, lat, lng):
        self.id = id
        self.label = label
        self.lat = lat
        self.lng = lng
        self.mapnik_coordinate = transformer.transform(self.lat, self.lng)


class Edge(object):
    def __init__(self, id, src_id, target_id, cost, skip1, skip2):
        self.id = id
        self.src_id = src_id
        self.target_id = target_id
        self.cost = cost
        self.skip1 = skip1
        self.skip2 = skip2
        self.replaced_by = -1
        self.level = -1
        self.normalized_level = -1
Example #15
    def __init__(self, config):
        self._config = config

        if self._config.db_enabled:
            # Initialize interface to Postgresql DB
            db_url = {
                "drivername": "postgresql+psycopg2",
                "username": self._config.db_user,
                "password": self._config.db_pw,
                "host": self._config.db_host,
                "port": self._config.db_port,
                "database": self._config.db_name,
            }

            dbschema = self._config.db_schema_import
            self._metadata = MetaData(schema=dbschema)
            logger.info(_("Connecting to database %s"), self._config.db_name)

            # Connect and set path to include VN import schema
            self._db = create_engine(URL(**db_url), echo=False)
            self._conn = self._db.connect()

            # Get dbtable definition
            self._metadata.reflect(bind=self._db, schema=dbschema)

            # Map Biolovision tables in a single dict for easy reference
            self._table_defs = {
                "entities": {
                    "type": "simple",
                    "metadata": None
                },
                "families": {
                    "type": "simple",
                    "metadata": None
                },
                "field_groups": {
                    "type": "fields",
                    "metadata": None
                },
                "field_details": {
                    "type": "fields",
                    "metadata": None
                },
                "forms": {
                    "type": "others",
                    "metadata": None
                },
                "local_admin_units": {
                    "type": "geometry",
                    "metadata": None
                },
                # "uuid_xref": {"type": "others", "metadata": None},
                "observations": {
                    "type": "observation",
                    "metadata": None
                },
                "observers": {
                    "type": "observers",
                    "metadata": None
                },
                "places": {
                    "type": "geometry",
                    "metadata": None
                },
                "species": {
                    "type": "simple",
                    "metadata": None
                },
                "taxo_groups": {
                    "type": "simple",
                    "metadata": None
                },
                "territorial_units": {
                    "type": "simple",
                    "metadata": None
                },
                "validations": {
                    "type": "simple",
                    "metadata": None
                },
            }
            self._table_defs["entities"]["metadata"] = self._metadata.tables[
                dbschema + ".entities_json"]
            self._table_defs["families"]["metadata"] = self._metadata.tables[
                dbschema + ".families_json"]
            self._table_defs["field_groups"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".field_groups_json"]
            self._table_defs["field_details"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".field_details_json"]
            self._table_defs["forms"]["metadata"] = self._metadata.tables[
                dbschema + ".forms_json"]
            self._table_defs["local_admin_units"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".local_admin_units_json"]
            # self._table_defs["uuid_xref"]["metadata"] = self._metadata.tables[
            #     dbschema + ".uuid_xref"
            # ]
            self._table_defs["observations"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".observations_json"]
            self._table_defs["observers"]["metadata"] = self._metadata.tables[
                dbschema + ".observers_json"]
            self._table_defs["places"]["metadata"] = self._metadata.tables[
                dbschema + ".places_json"]
            self._table_defs["species"]["metadata"] = self._metadata.tables[
                dbschema + ".species_json"]
            self._table_defs["taxo_groups"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".taxo_groups_json"]
            self._table_defs["territorial_units"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".territorial_units_json"]
            self._table_defs["validations"][
                "metadata"] = self._metadata.tables[dbschema +
                                                    ".validations_json"]

            # Create transformation
            self._transformer = Transformer.from_proj(
                4326, int(self._config.db_out_proj), always_xy=True)

        return None
Example #16
filepath = './data/ULSAN_NG_2018.csv'
df_=pd.read_csv(filepath,index_col=0)


id_set = list(set(df_.id))

id_wgs = []  # this loop takes ~1h 30m
for i in tqdm(id_set):
    id_wgs.append([i, df_[df_.id == i]['x'].iloc[0], df_[df_.id == i]['y'].iloc[0]])

df_pop = pd.DataFrame(id_wgs)
df_pop.columns = ['id', 'x', 'y']

transproj_eq = Transformer.from_proj(
    '+proj=tmerc +lat_0=38 +lon_0=128 +k=0.9999 +x_0=400000 +y_0=600000 +ellps=bessel +towgs84=-115.8,474.99,674.11,1.16,-2.31,-1.63,6.43 +units=m +no_defs',
    'EPSG:4326',
    always_xy=True,
    skip_equivalent=True)

coor_array=np.array(df_pop[['x','y']])
coor=[]
for i in range(len(df_pop)):
    coor.append((coor_array[i][0],coor_array[i][1]))
WGS_list=[]
for pt in transproj_eq.itransform(coor):
    WGS=('{:.10f} {:.10f}'.format(*pt).split(' '))
    WGS_list.append((float(WGS[0]),float(WGS[1])))


WGS_lat=[]
WGS_lon=[]
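Presumably the script goes on to split the converted pairs; with always_xy=True the transformed tuples come back as (lon, lat), so a hedged continuation would be:

for lon, lat in WGS_list:
    WGS_lon.append(lon)
    WGS_lat.append(lat)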
Example #17
def grid_to_grid(grid,
                 time_name,
                 x_name,
                 y_name,
                 data_name,
                 grid_res,
                 from_crs,
                 to_crs=None,
                 bbox=None,
                 order=3,
                 extrapolation='constant',
                 fill_val=np.nan,
                 digits=2,
                 min_val=None):
    """
    Function to interpolate regularly or irregularly spaced values over many time stamps. Each time stamp of spatial values is interpolated independently (2D interpolation as opposed to 3D interpolation). Returns an xarray Dataset with the 3 dimensions. Uses the scipy interpolation function called map_coordinates.

    Parameters
    ----------
    grid : DataFrame or Dataset
        A pandas DataFrame or an xarray Dataset. It's recommended to use an xarray Dataset for the input grid as it ensures that the user knows that it is truly regular. Regardless, the input will be regularised.
    time_name : str
        If grid is a DataFrame, then time_name is the time column name. If grid is a Dataset, then time_name is the time coordinate name.
    x_name : str
        If grid is a DataFrame, then x_name is the x column name. If grid is a Dataset, then x_name is the x coordinate name.
    y_name : str
        If grid is a DataFrame, then y_name is the y column name. If grid is a Dataset, then y_name is the y coordinate name.
    data_name : str
        If grid is a DataFrame, then data_name is the data column name. If grid is a Dataset, then data_name is the data variable name.
    grid_res : int or float
        The resulting grid resolution in the unit of the final projection (usually meters or decimal degrees).
    from_crs : int or str or None
        The projection info for the input data if the result should be reprojected to the to_crs projection (either a proj4 str or epsg int).
    to_crs : int or str or None
        The projection for the output data similar to from_crs.
    bbox : tuple of int or float
        The bounding box for the output interpolation in the to_crs projection. None will return a similar grid extent as the input. The tuple should contain four ints or floats in the following order: (x_min, x_max, y_min, y_max)
    order : int
        The order of the spline interpolation, default is 3. The order has to be in the range 0-5. An order of 1 is linear interpolation.
    extrapolation : str
        The equivalent of 'mode' in the map_coordinates function. Options are: 'constant', 'nearest', 'reflect', 'mirror', and 'wrap'. The most reasonable options for this function will be either 'constant' or 'nearest'. See `<https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.map_coordinates.html>`_ for more details.
    fill_val : int or float
        If 'constant' if passed to the extrapolation parameter, fill_val assigns the value outside of the boundary. Defaults to numpy.nan.
    digits : int
        The number of digits to round the output.
    min_val : int, float, or None
        The minimum value for the results. All results below min_val will be assigned min_val.

    Returns
    -------
    xarray Dataset
    """
    print('Preparing input and output')
    if from_crs == 4326:
        input_digits = 4
    else:
        input_digits = 0
    if (to_crs == 4326) | ((from_crs == 4326) & (to_crs is None)):
        output_digits = 4
    else:
        output_digits = 0

    ### Prepare input data
    arr1, xy_orig_pts, time1 = _process_grid_input(grid, time_name, x_name,
                                                   y_name, data_name)

    input_coords, dxy, x_min, y_min = grid_xy_to_map_coords(
        xy_orig_pts, input_digits)

    ### convert to new projection and prepare X/Y data
    if isinstance(from_crs, (str, int)) & isinstance(to_crs, (str, int)):
        from_crs1 = Proj(CRS.from_user_input(from_crs))
        to_crs1 = Proj(CRS.from_user_input(to_crs))
        trans1 = Transformer.from_proj(from_crs1, to_crs1)
        xy_new = np.array(
            trans1.transform(*xy_orig_pts.T)).round(output_digits)
        out_y_min, out_x_min = xy_new.min(1)
        out_y_max, out_x_max = xy_new.max(1)
    else:
        out_y_max, out_x_max = xy_orig_pts.max(0)
        out_x_min = x_min
        out_y_min = y_min

    ### Prepare output data
    if isinstance(bbox, tuple):
        out_x_min, out_x_max, out_y_min, out_y_max = bbox

    new_x = np.arange(out_x_min, out_x_max, grid_res)
    new_y = np.arange(out_y_min, out_y_max, grid_res)

    xy_out = np.dstack(np.meshgrid(new_y, new_x)).reshape(-1, 2)

    if isinstance(to_crs, (str, int)):
        trans2 = Transformer.from_proj(to_crs1, from_crs1)
        xy_new_index = np.array(trans2.transform(*xy_out.T)).T
    else:
        xy_new_index = xy_out

    output_coords = point_xy_to_map_coords(xy_new_index, dxy, x_min, y_min,
                                           float)

    ### Run interpolation (TODO: add multiprocessing)
    arr2 = np.zeros((len(time1), output_coords.shape[1]), arr1.dtype)

    print('Running interpolations...')
    ## An example for using RectBivariateSpline as the equivalent to map_coordinates output (about half as fast):
    #    arr2a = arr2.copy()
    #
    #    x_out = xy_new_index.T[1]
    #    y_out = xy_new_index.T[0]
    #
    #    for d in np.arange(len(arr1)):
    #        arr2a[d] = RectBivariateSpline(y, x, arr1[d], kx=3, ky=3).ev(y_out, x_out)

    for d in np.arange(len(arr1)):
        map_coordinates(arr1[d],
                        output_coords,
                        arr2[d],
                        order=order,
                        mode=extrapolation,
                        cval=fill_val,
                        prefilter=True)

    ### Reshape and package data
    print('Packaging up the output')
    arr2 = arr2.reshape((len(time1), len(new_x), len(new_y))).round(digits)

    if isinstance(min_val, (int, float)):
        arr2[arr2 < min_val] = min_val

    new_ds = xr.DataArray(arr2,
                          coords=[time1, new_x, new_y],
                          dims=['time', 'x', 'y'],
                          name=data_name).to_dataset()

    return new_ds
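A minimal, hypothetical usage sketch (grid values, coordinates, and the EPSG code are invented; it assumes this module's imports and helper functions):

import numpy as np
import pandas as pd
import xarray as xr

ds = xr.DataArray(
    np.arange(9.0).reshape(1, 3, 3),
    coords=[pd.to_datetime(['2020-01-01']), [0.0, 1000.0, 2000.0],
            [0.0, 1000.0, 2000.0]],
    dims=['time', 'x', 'y'], name='precip').to_dataset()

out = grid_to_grid(ds, time_name='time', x_name='x', y_name='y',
                   data_name='precip', grid_res=500,
                   from_crs=2193, to_crs=2193)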
Example #18
def test_transform__out_of_bounds():
    with pytest.warns(FutureWarning):
        transformer = Transformer.from_proj("+init=epsg:4326",
                                            "+init=epsg:27700")
    with pytest.raises(pyproj.exceptions.ProjError):
        transformer.transform(100000, 100000, errcheck=True)
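Note that this variant expects a ProjError from errcheck=True, while the copy of the same test in Example #9 above asserted infinite return values for the identical call; the two appear to come from different pyproj releases, between which the errcheck=True behaviour changed from returning inf to raising.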
Example #19
    def __init__(self, image_dir, max_cache=1000, in_proj='epsg:4326'):
        self.image_dir = image_dir

        # config geographic projections
        self.in_proj, self.ign_proj = Proj(init=in_proj), Proj(
            init='epsg:2154')

        # self.transformer_in_out = Transformer.from_proj(in_proj, out_proj)
        self.transformer = Transformer.from_proj(self.in_proj, self.ign_proj)

        # configure cache of tiles
        self.cache_dic = {}
        self.cache_list = []

        self.max_cache = max_cache

        # get all files in the folder image directory
        files = list_files(self.image_dir)

        # construct tile object for all tile images in the list of files
        self.list_tile, \
            self.min_x, \
            self.max_x, \
            self.min_y, \
            self.max_y = \
            self.get_tile_list(files, get_bounds=True)

        print_info('{} tiles were found!'.format(len(self.list_tile)))

        # use the first tile in the list to get images information
        # the information are contained in the last folder's name
        info_images = self.list_tile[0].decomposed_name[-2].split("_")
        self.image_resolution = info_images[2]
        self.image_type = info_images[1]
        self.image_encoding = info_images[3]
        self.image_projection = info_images[4]

        # size and range of images depends on the ign dataset
        global resolution_details
        self.image_range, self.image_size = resolution_details[
            self.image_resolution]

        # configure the spatial area containing all tiles
        self.max_y = self.max_y + self.image_range
        self.min_x = self.min_x - self.image_range

        self.x_size = int((self.max_x - self.min_x) / self.image_range)
        self.y_size = int((self.max_y - self.min_y) / self.image_range)

        # create spatial matrix to order and place tiles relatively to their spatial position
        self.map = np.empty((self.x_size, self.y_size), dtype=object)

        # fill the cartographic matrix with all tiles
        for tile in self.list_tile:
            tile.set_y_max(tile.y_min + self.image_range)
            tile.set_x_min(tile.x_max - self.image_range)
            pos_x, pos_y = self.position_to_tile_index(tile.x_max, tile.y_min)
            if type(self.map[pos_x, pos_y]) is not Tile or int(
                    self.map[pos_x, pos_y].date) < int(tile.date):
                # if 2 tiles cover the same area we keep the most recent one
                self.map[pos_x, pos_y] = tile
Example #20
def create_seed_harvest_geoGrid_interpolator_and_read_data(path_to_csv_file, worldGeodeticSys84, geoTargetGrid, ilr_seed_harvest_data):
    "read seed/harvest dates and apoint climate stations"

    wintercrop = {
        "WW": True,
        "SW": False,
        "WR": True,
        "WRa": True,
        "WB": True,
        "SM": False,
        "GM": False,
        "SBee": False,
        "SB": False,
        "CLALF": False,
        "ALF": False,
        "SWR": True
    }

    with open(path_to_csv_file) as _:
        reader = csv.reader(_)

        #print "reading:", path_to_csv_file

        # skip header line
        next(reader)

        points = [] # climate station position (lat, long transformed to a geoTargetGrid, e.g gk5)
        values = [] # climate station ids

        transformer = Transformer.from_proj(worldGeodeticSys84, geoTargetGrid) 

        prev_cs = None
        prev_lat_lon = [None, None]
        #data_at_cs = defaultdict()
        for row in reader:
            
            # first column, climate station
            cs = int(row[0])

            # if new climate station, store the data of the old climate station
            if prev_cs is not None and cs != prev_cs:

                llat, llon = prev_lat_lon
                #r_geoTargetGrid, h_geoTargetGrid = transform(worldGeodeticSys84, geoTargetGrid, llon, llat)
                r_geoTargetGrid, h_geoTargetGrid = transformer.transform(llon, llat)
                    
                points.append([r_geoTargetGrid, h_geoTargetGrid])
                values.append(prev_cs)

            crop_id = row[3]
            is_wintercrop = wintercrop[crop_id]
            ilr_seed_harvest_data[crop_id]["is-winter-crop"] = is_wintercrop

            base_date = date(2001, 1, 1)

            sdoy = int(float(row[4]))
            ilr_seed_harvest_data[crop_id]["data"][cs]["sowing-doy"] = sdoy
            sd = base_date + timedelta(days = sdoy - 1)
            ilr_seed_harvest_data[crop_id]["data"][cs]["sowing-date"] = "0000-{:02d}-{:02d}".format(sd.month, sd.day)

            esdoy = int(float(row[8]))
            ilr_seed_harvest_data[crop_id]["data"][cs]["earliest-sowing-doy"] = esdoy
            esd = base_date + timedelta(days = esdoy - 1)
            ilr_seed_harvest_data[crop_id]["data"][cs]["earliest-sowing-date"] = "0000-{:02d}-{:02d}".format(esd.month, esd.day)

            lsdoy = int(float(row[9]))
            ilr_seed_harvest_data[crop_id]["data"][cs]["latest-sowing-doy"] = lsdoy
            lsd = base_date + timedelta(days = lsdoy - 1)
            ilr_seed_harvest_data[crop_id]["data"][cs]["latest-sowing-date"] = "0000-{:02d}-{:02d}".format(lsd.month, lsd.day)

            digit = 1 if is_wintercrop else 0

            hdoy = int(float(row[6]))
            ilr_seed_harvest_data[crop_id]["data"][cs]["harvest-doy"] = hdoy
            hd = base_date + timedelta(days = hdoy - 1)
            ilr_seed_harvest_data[crop_id]["data"][cs]["harvest-date"] = "000{}-{:02d}-{:02d}".format(digit, hd.month, hd.day)

            ehdoy = int(float(row[10]))
            ilr_seed_harvest_data[crop_id]["data"][cs]["earliest-harvest-doy"] = ehdoy
            ehd = base_date + timedelta(days = ehdoy - 1)
            ilr_seed_harvest_data[crop_id]["data"][cs]["earliest-harvest-date"] = "000{}-{:02d}-{:02d}".format(digit, ehd.month, ehd.day)

            lhdoy = int(float(row[11]))
            ilr_seed_harvest_data[crop_id]["data"][cs]["latest-harvest-doy"] = lhdoy
            lhd = base_date + timedelta(days = lhdoy - 1)
            ilr_seed_harvest_data[crop_id]["data"][cs]["latest-harvest-date"] = "000{}-{:02d}-{:02d}".format(digit, lhd.month, lhd.day)

            lat = float(row[1])
            lon = float(row[2])
            prev_lat_lon = (lat, lon)      
            prev_cs = cs

        # store the data of the last climate station, which the loop above would otherwise drop
        if prev_cs is not None:
            llat, llon = prev_lat_lon
            r_geoTargetGrid, h_geoTargetGrid = transformer.transform(llon, llat)
            points.append([r_geoTargetGrid, h_geoTargetGrid])
            values.append(prev_cs)

        ilr_seed_harvest_data[crop_id]["interpolate"] = NearestNDInterpolator(np.array(points), np.array(values))
Example #21
def test_equivalent_proj__different():
    transformer = Transformer.from_proj(3857, 4326, skip_equivalent=True)
    assert transformer._transformer.skip_equivalent
    assert not transformer._transformer.projections_equivalent
    assert not transformer._transformer.projections_exact_same
Example #22
def Daymet_ELM_gridmatching(Grid1_Xdim, Grid1_Ydim, Grid2_x, Grid2_y, \
                         Grid1ifxy=False, Grid2ifxy=True, Grid1_cells=()):

    # Match Grid2 within each Grid1, i.e. return the Grid2 x/y indices for each Grid1 cell.
    # By default: (1) Grid1 (parent) is in lon/lat (aka ifxy=False), Grid2 in geo x/y (aka ifxy=True);
    #             (2) Grid1 X/Y are grid-mesh nodes, Grid2 x/y are grid centroids, which is good for searching Grid2 in Grid1;
    #             (3) all cells in Grid1 are assigned Grid2 cell indices - maybe only those indexed in 'Grid1_cells'.

    # It is assumed that Grid1 X/Y are 1-D regularly-intervaled (though maybe not evenly spaced) nodes along each axis,
    # while Grid2 may be either 1-D like Grid1 or a 2-D mesh.
    if (len(Grid2_x.shape) < 2):
        # Grid2 must be converted to 2D paired x/y mesh, if not
        Grid2_xx, Grid2_yy = np.meshgrid(Grid2_x,
                                         Grid2_y)  # mid-points of grid
    elif (len(Grid2_x.shape) == 2):
        # Grid2 grid-centroids are in paired x/y for each grid
        Grid2_xx = Grid2_x
        Grid2_yy = Grid2_y

    if (len(Grid1_Xdim.shape) == 1):  # Grid1 mesh in TWO 1-D dimensional nodes
        Grid1_x = Grid1_Xdim
        Grid1_y = Grid1_Ydim
        Grid1_xx, Grid1_yy = np.meshgrid(Grid1_Xdim,
                                         Grid1_Ydim)  # nodes of grid-mesh
    else:
        #Grid1 mesh in 2-D for X/Y axis
        print('TODO - matching range-Grid1 in 2D mesh')
        sys.exit()

    # For projection conversion
    #     short lambert_conformal_conic ;
    #    lambert_conformal_conic:grid_mapping_name = "lambert_conformal_conic" ;
    #    lambert_conformal_conic:longitude_of_central_meridian = -100. ;
    #    lambert_conformal_conic:latitude_of_projection_origin = 42.5 ;
    #    lambert_conformal_conic:false_easting = 0. ;
    #    lambert_conformal_conic:false_northing = 0. ;
    #    lambert_conformal_conic:standard_parallel = 25., 60. ;
    #    lambert_conformal_conic:semi_major_axis = 6378137. ;
    #    lambert_conformal_conic:inverse_flattening = 298.257223563 ;
    #Proj4: +proj=lcc +lon_0=-100 +lat_1=25 +lat_2=60 +k=1 +x_0=0 +y_0=0 +R=6378137 +f=298.257223563 +units=m  +no_defs
    geoxy_proj_str = "+proj=lcc +lon_0=-100 +lat_0=42.5 +lat_1=25 +lat_2=60 +x_0=0 +y_0=0 +R=6378137 +f=298.257223563 +units=m +no_defs"
    geoxyProj = CRS.from_proj4(geoxy_proj_str)

    # EPSG: 4326
    # Proj4: +proj=longlat +datum=WGS84 +no_defs
    lonlatProj = CRS.from_epsg(4326)  # in lon/lat coordinates

    # only if the 2 grids are in different projections, do the transformation
    if (Grid2ifxy and not Grid1ifxy):
        Txy2lonlat = Transformer.from_proj(geoxyProj,
                                           lonlatProj,
                                           always_xy=True)
        Grid2_gxx, Grid2_gyy = Txy2lonlat.transform(Grid2_xx, Grid2_yy)

        ij = np.where(Grid2_gxx < 0.0)
        if (len(ij[0]) > 0):
            Grid2_gxx[ij] = Grid2_gxx[
                ij] + 360.0  # for convenience, longitude from 0~360
        ij = np.where(Grid1_x < 0.0)
        if (len(ij[0]) > 0):
            Grid1_x[ij] = Grid1_x[
                ij] + 360.0  # for convenience, longitude from 0~360

    elif (not Grid2ifxy and Grid1ifxy):
        Tlonlat2xy = Transformer.from_proj(lonlatProj,
                                           geoxyProj,
                                           always_xy=True)
        Grid2_gxx, Grid2_gyy = Tlonlat2xy.transform(Grid2_xx, Grid2_yy)

    else:
        Grid2_gxx = Grid2_xx
        Grid2_gyy = Grid2_yy

    # DAYMET grids' index (Grid2) included in each ELM land-grid (Grid1)
    Grid2in1_indx = {}
    if (len(Grid1_cells) <= 0):
        Grid1_ij = np.where(~np.isnan(Grid1_xx[:-1, :-1]))  # cell-index rather than mesh-line index
    else:
        Grid1_ij = Grid1_cells

    for indx in range(len(Grid1_ij[0])):  # Grid1 grid-cell no.
        j = Grid1_ij[0][indx]  # ELM output data is in (t, elmy, elmx) dimensional order
        i = Grid1_ij[1][indx]

        iwst = np.min(Grid1_x[i:i + 2])
        iest = np.max(Grid1_x[i:i + 2])
        jsth = np.min(Grid1_y[j:j + 2])
        jnth = np.max(Grid1_y[j:j + 2])
        ij = np.where( ((Grid2_gxx<=iest) & (Grid2_gxx>iwst)) & \
                       ((Grid2_gyy<=jnth) & (Grid2_gyy>jsth)) )
        Grid2in1_indx[str(indx)] = deepcopy(ij)

        if False:  # the following block is disabled - not correct
            #if(len(ij[0])<1):
            # none of the DAYMET cell centroids falls inside this ELM grid cell, so find the closest one instead
            closej = np.where(
                (Grid2_gyy <= jnth) &
                (Grid2_gyy > jsth))  # do lat/y first, since it is evenly intervaled along lat/y
            if closej[0].size <= 0:
                closei = np.where(
                    (Grid2_gxx <= iest) & (Grid2_gxx > iwst))  # do lon/x first
                if (closei[0].size > 0):
                    closeiy = np.argmin(
                        abs(Grid2_gyy[closei] - (jnth + jsth) / 2.0))
                    closeij = (np.asarray(closei[0][closeiy]),
                               np.asarray(closei[1][closeiy]))
                else:
                    closeij = deepcopy(closei)
            else:
                closejx = np.argmin(
                    abs(Grid2_gxx[closej] - (iwst + iest) / 2.0))
                closeij = (np.asarray(closej[0][closejx]),
                           np.asarray(closej[1][closejx]))
            if len(closeij[0] > 0):
                Grid2in1_indx[str(indx)] = deepcopy(closeij)

    # done with all grids
    return Grid2in1_indx, Grid2_gxx, Grid2_gyy
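A reduced sketch of the projection round-trip used above (coordinates are illustrative; the questionable +f term is dropped here since +R already declares a sphere):

from pyproj import CRS, Transformer

lcc = CRS.from_proj4("+proj=lcc +lon_0=-100 +lat_0=42.5 +lat_1=25 +lat_2=60 "
                     "+x_0=0 +y_0=0 +R=6378137 +units=m +no_defs")
to_lonlat = Transformer.from_proj(lcc, CRS.from_epsg(4326), always_xy=True)
print(to_lonlat.transform(0.0, 0.0))  # projection origin -> approximately (-100.0, 42.5)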
Example #23
def __init__(self):
    # initializing WGS84 (epsg: 4326) and Israeli TM Grid (epsg: 2039) projections.
    # for more info: https://epsg.io/<epsg_num>/
    self.transformer = Transformer.from_proj(2039, 4326, always_xy=True)
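A hedged standalone sketch of the same transformer (the easting/northing pair is hypothetical):

from pyproj import Transformer

transformer = Transformer.from_proj(2039, 4326, always_xy=True)
lon, lat = transformer.transform(200000, 600000)  # ITM easting, northing -> lon, lat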
Example #24
def visibility_processing(radar, dem_grid,
                          frequency, 
                          beamwidth, 
                          tau,
                          power,
                          gain,
                          loss,
                          fill_value = None, 
                          terrain_altitude_field = None,
                          bent_terrain_altitude_field = None,
                          terrain_slope_field = None,
                          terrain_aspect_field = None,
                          theta_angle_field = None,
                          visibility_field = None,
                          min_vis_altitude_field = None,
                          min_vis_theta_field = None,
                          incident_angle_field = None,
                          sigma_0_field = None,
                          rcs_ground_clutter_field = None,
                          dBm_ground_clutter_field = None,
                          dBZ_ground_clutter_field = None,
                          atm_att = 0.05,
                          mosotti = 0.9644,
                          ke = 4/3., 
                          sigma_0_method = 'gabella',
                          quad_pts_range = 1,
                          quad_pts_az = 9, 
                          quad_pts_el = 9, 
                          parallel = True):
            
    # parse fill value
    if fill_value is None:
        fill_value = get_fillvalue()

    # parse field names
    if terrain_altitude_field is None:
        terrain_altitude_field = get_field_name('terrain_altitude')
    if bent_terrain_altitude_field is None:
        bent_terrain_altitude_field = get_field_name('bent_terrain_altitude')
    if terrain_slope_field is None:
        terrain_slope_field = get_field_name('terrain_slope')
    if terrain_aspect_field is None:
        terrain_aspect_field = get_field_name('terrain_aspect')
    if theta_angle_field is None:
        theta_angle_field = get_field_name('theta_angle')
    if visibility_field is None:
        visibility_field = get_field_name('visibility')
    if min_vis_altitude_field is None:
        min_vis_altitude_field = get_field_name('min_vis_altitude')
    if incident_angle_field is None:
        incident_angle_field = get_field_name('incident_angle')    
    if sigma_0_field is None:
        sigma_0_field = get_field_name('sigma_0')   
    if rcs_ground_clutter_field is None:
        rcs_ground_clutter_field = get_field_name('rcs_ground_clutter')   
    if dBm_ground_clutter_field is None:
        dBm_ground_clutter_field = get_field_name('dBm_ground_clutter')   
    if dBZ_ground_clutter_field is None:
        dBZ_ground_clutter_field = get_field_name('dBZ_ground_clutter')   
        
    # Define aeqd projection for radar local Cartesian coords
    pargs = Proj(proj="aeqd", lat_0 = radar.latitude['data'][0], 
                 lon_0 = radar.longitude['data'][0], datum = "WGS84", 
                 units="m")    
    
    # Define coordinate transform: (local radar Cart coords) -> (DEM coords)
    global transformer
    transformer = Transformer.from_proj(pargs, dem_grid.projection)
    
    # Get quadrature pts and GH_weights in az,el directions
    GH_pts, GH_weights = _GH_quadrature(quad_pts_el, quad_pts_az, beamwidth)
    
    # Create grid interpolator
    global nngrid
    nngrid = _IndexNNGrid(dem_grid.x['data'], dem_grid.y['data'])
    
    # Initialize output fields
    
    nazimuth = len(radar.azimuth['data'])
    ngates = len(radar.range['data'])
    
    visibility = np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    theta =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    dem_bent = np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    terrain_slope =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    terrain_aspect =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    min_vis_theta =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    min_vis_altitude =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    incident_angle =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    sigma_0 =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    clutter_rcs =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    clutter_dBZ =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    clutter_dBm =  np.ma.zeros([nazimuth, ngates], fill_value = fill_value)
    ##
    
    
    dem_data = dem_grid.fields[terrain_altitude_field]['data']
    # In the first step we estimate the variables at topography level but
    # only over the radar coordinates        
    for i, az in enumerate(radar.get_azimuth(0)): # Loop on az angles
    
        # Get radar Cartesian coords at elevation 0
        xr, yr, zr = rad_to_cart(radar.range['data'],
                                 az, 0,
                                 ke = ke)
        # Add radar altitude to z coordinates
        zr = zr[0] + radar.altitude['data']
        
        # Project to DEM coordinates and get corresponding grid indexes
        xr_proj, yr_proj = transformer.transform(xr,yr)
        i_idx,j_idx = nngrid.get_idx(xr_proj,yr_proj)
        
        # Create ray and compute relevant variabls
        ray = _Ray(az, radar.range['data'], i_idx, j_idx, dem_data)
        ray.calc_dem_bent(dem_data, zr)
        ray.calc_slope_and_aspect(dem_data, dem_grid.metadata['resolution'])
        ray.calc_theta()
        ray.calc_min_theta(beamwidth)
        ray.calc_min_vis_alt( ke)
        ray.calc_incident_ang()
        ray.calc_sigma_0(freq_ghz = frequency)
        
        theta[i] = ray.theta
        min_vis_theta[i] = ray.min_vis_theta
        min_vis_altitude[i] = ray.min_vis_alt
        dem_bent[i] = ray.dem_bent
        terrain_slope[i] = ray.slope
        terrain_aspect[i] = ray.aspect
        sigma_0[i] = ray.sigma_0
        incident_angle[i] = ray.incident_ang
        
    theta_dic = get_metadata(theta_angle_field)
    theta_dic['data'] = theta
    
    slope_dic = get_metadata(terrain_slope_field)
    slope_dic['data'] = terrain_slope
    
    aspect_dic = get_metadata(terrain_aspect_field)
    aspect_dic['data'] = terrain_aspect
    
    min_vis_theta_dic = get_metadata(min_vis_theta_field)
    min_vis_theta_dic['data'] = min_vis_theta

    min_vis_altitude_dic = get_metadata(min_vis_altitude_field)
    min_vis_altitude_dic['data'] = min_vis_altitude  
    
    bent_terrain_altitude_dic = get_metadata(bent_terrain_altitude_field)
    bent_terrain_altitude_dic['data'] = dem_bent  

    incident_angle_dic = get_metadata(incident_angle_field)
    incident_angle_dic['data'] = incident_angle  
    
    sigma_0_dic = get_metadata(sigma_0_field)
    sigma_0_dic['data'] = sigma_0  
    
    if quad_pts_range >= 3:
        """ 
        Interpolate range array based on how many quadrature points in range
        are wanted (at least 3)
        For example if quad_pts_range = 5 and original range array is 
        [25,75,125] (bin centers), it will give [0,25,50,50,75,100,125,150]
        i.e. bin 0 - 50 (with center 25) is decomposed into ranges [0,25,50]
        """
        ranges = radar.range['data']
        nrange = len(radar.range['data'])
        dr = np.mean(np.diff(radar.range['data'])) # range res
        intervals = np.arange(ranges[0] - dr / 2, dr * nrange + dr / 2, dr)
        range_resampled = []
        for i in range(len(intervals) - 1):
            range_resampled.extend(np.linspace(intervals[i], intervals[i + 1],
                                               quad_pts_range))
    else:
        # No resampling
        range_resampled = radar.range['data']
    print('done')
    
    # create parallel computing instance
    if parallel:
        pool = mp.Pool(processes=mp.cpu_count(), maxtasksperchild=1)
        map_ = pool.map
    else:
        map_ = map
        
    # Create parameter dictionaries for workers
    radar_params = {}
    radar_params['radar_altitude'] = radar.altitude['data']
    radar_params['freq'] = frequency
    radar_params['power'] = power
    radar_params['gain'] = gain
    radar_params['loss'] = loss
    radar_params['beamwidth'] = beamwidth
    radar_params['tau'] = tau
    
    atm_params = {}
    atm_params['atm_att'] = atm_att
    atm_params['ke'] = ke
    atm_params['mosotti'] = mosotti
    
    numeric_params = {}
    numeric_params['grid_res'] = dem_grid.metadata['resolution']
    numeric_params['quad_pts_range'] = quad_pts_range
    numeric_params['GH_pts'] = GH_pts
    numeric_params['GH_weights'] = GH_weights
    numeric_params['sigma_0_method'] = sigma_0_method
    
    # Loop on fixed angles : el for ppi, az for rhi
    idx = 0
    for i, fixangle in enumerate(radar.fixed_angle['data']): 
        # Create partial worker func that takes only angle as input
        if radar.scan_type == 'ppi':
            angles = list((zip(radar.get_azimuth(i), repeat(fixangle))))
        elif radar.scan_type == 'rhi':
            angles = list((zip(repeat(fixangle), radar.get_elevation(i))))
            
        partialworker = partial(_worker_function, 
                                ranges = range_resampled,
                                dem_data = dem_data,
                                radar_params = radar_params,
                                atm_params = atm_params,
                                numeric_params = numeric_params)
   
        results = list(map_(partialworker, angles))
     
        visibility[idx : idx + len(results), :] = np.array([r[0] for r 
                                                            in results])
        clutter_rcs[idx : idx + len(results), :] = np.array([r[1] for r 
                                                            in results])
        clutter_dBm[idx : idx + len(results), :] = np.array([r[2] for r
                                                            in results])
        clutter_dBZ[idx : idx + len(results), :] = np.array([r[3] for r 
                                                            in results])
        
        idx += len(results)

    # close the pool only after all fixed angles have been processed;
    # closing it inside the loop would break subsequent iterations
    if parallel:
        pool.close()

    visibility_dic = get_metadata(visibility_field)
    visibility_dic['data'] = visibility  

    rcs_ground_clutter_dic = get_metadata(rcs_ground_clutter_field)
    rcs_ground_clutter_dic['data'] = clutter_rcs  
    
    dBm_ground_clutter_dic = get_metadata(dBm_ground_clutter_field)
    dBm_ground_clutter_dic['data'] = clutter_dBm 
    
    dBZ_ground_clutter_dic = get_metadata(dBZ_ground_clutter_field)
    dBZ_ground_clutter_dic['data'] = clutter_dBZ  
    
    return (bent_terrain_altitude_dic, slope_dic, aspect_dic, 
            theta_dic, min_vis_theta_dic, min_vis_altitude_dic,
            visibility_dic, incident_angle_dic, sigma_0_dic,
            rcs_ground_clutter_dic, dBm_ground_clutter_dic,
            dBZ_ground_clutter_dic)
Example #25
def match_nearest_edge(graph, track):
    """
    Algorithm to match the track to the nearest edge on the OpenStreetMap network.

    This function matches a track of GPS coordinates, in (Lat, Lon) format,
    to a graph.

    It loops over all the points and matches each of them to the closest edge of the
    OSM network. The GPS points are projected onto the edge with an
    orthogonal projection.
    If the projected point falls outside the edge, it is matched to
    one extremity (see noiseplanet.utils.oproj documentation for more details).

    Parameters
    ----------
    graph : NetworkX MultiDiGraph
        Graph of the Open Street Map network.
    track : numpy 2D array
        A 2D matrix composed by Latitudes (first column) and Longitudes (second column)
        of the track.

    Returns
    -------
    track_corr : numpy 2D array
        A 2D matrix composed by Latitudes (first column) and Longitudes (second column)
        of the corrected track.
    route : numpy 2D array
        Route connecting all track's points on the Open Street Map network.
    edgeid : numpy 2D array
        List of edges to which each point belongs.
        Edge ids are composed of their two extremity node ids.
    stats : Dict
        Statistics of the Map Matching.
        'proj_length' is the length of the projection (from the track's points to the corrected ones),
        'path_length' is the distance on the graph between two following points,
        'unlinked' highlights unconnected points on the graph.
   
    Example
    -------
        >>> import osmnx as ox
        >>> import numpy as np
        >>> from noiseplanet.matcher.model.leuven import leuven
        >>> place_name = "2e Arrondissement, Lyon, France"
        >>> distance = 1000  # meters
        >>> graph = ox.graph_from_address(place_name, distance)
        >>> track = np.array([[45.75809136,  4.83577159],
                              [45.7580932 ,  4.83576182],
                              [45.7580929 ,  4.8357634 ],
                              [45.75809207,  4.8357678 ],
                              [45.75809207,  4.8357678 ],
                              [45.75809647,  4.83574439],
                              [45.75809908,  4.83573054],
                              [45.75809908,  4.83573054],
                              [45.75810077,  4.83572153],
                              [45.75810182,  4.83571596],
                              [45.75810159,  4.83571719],
                              [45.7581021 ,  4.83571442],
                              [45.7580448 ,  4.83558152],
                              [45.75804304,  4.83558066],
                              [45.75804304,  4.83558066],
                              [45.75802703,  4.83557288]])
        >>> track_corr, route_corr, edgeid = match_nearest(graph, track)
    """
    # id of the nearest edges
    edgeid = ox.get_nearest_edges(graph,
                                  track[:, 1],
                                  track[:, 0],
                                  method='balltree',
                                  dist=.000001)
    lat1, lat2, lon1, lon2 = [], [], [], []
    for edge in edgeid:
        lon1.append(graph.nodes[edge[0]]['x'])
        lat1.append(graph.nodes[edge[0]]['y'])
        lon2.append(graph.nodes[edge[1]]['x'])
        lat2.append(graph.nodes[edge[1]]['y'])

    # Reference ellipsoid for distance
    # Projection of the point in the web mercator coordinate system (used by OSM)
    proj_init = "epsg:4326"  # geographic coordinates
    proj_out = "epsg:3857"  # web mercator coordinates
    # Using the class Transformer, faster for large dataset
    transformer = Transformer.from_proj(Proj(init=proj_init),
                                        Proj(init=proj_out))
    Xt, Yt = transformer.transform(track[:, 1], track[:, 0])
    # Projecting the nearest edges' nodes 1, 2 into the same coordinates system
    X1, Y1 = transformer.transform(lon1, lat1)
    X2, Y2 = transformer.transform(lon2, lat2)

    # With the transform function (slower since the 2.2.0 update)
    # Xt, Yt = transform(Proj(init=proj_init), Proj(init=proj_out), track[:,1], track[:,0])
    # X1, Y1 = transform(Proj(init=proj_init), Proj(init=proj_out), lon1, lat1)
    # X2, Y2 = transform(Proj(init=proj_init), Proj(init=proj_out), lon2, lat2)

    # Evaluate, for each point, the projection distance(point, point_proj)
    proj_dist = np.zeros(len(track))
    Xcorr, Ycorr = [], []
    for i in range(len(track)):
        # Ortho projection
        x, y = Xt[i], Yt[i]
        xH, yH = oproj.orthoProjSegment((x, y), (X1[i], Y1[i]), (X2[i], Y2[i]))
        # Stack the coordinates in the web mercator coordinates system
        Xcorr.append(xH)
        Ycorr.append(yH)

    transformer = Transformer.from_proj(Proj(init=proj_out),
                                        Proj(init=proj_init))

    lon_corr, lat_corr = transformer.transform(Xcorr, Ycorr)
    # With the transform function (slower since the 2.2.0 update)
    # lon_corr, lat_corr = transform(Proj(init=proj_out), Proj(init=proj_init), Xcorr, Ycorr)

    # Evaluate the distance betweeen these points
    # Reference ellipsoid for distance
    geod = Geod(ellps='WGS84')
    _, _, proj_dist = geod.inv(track[:, 1], track[:, 0], lon_corr, lat_corr)

    track_corr = np.column_stack((lat_corr, lon_corr))
    route, stats_route = rt.route_from_track(graph, track_corr, edgeid=edgeid)
    stats = dict({"proj_length": proj_dist}, **stats_route)

    return track_corr, route, np.array(edgeid), stats
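Note: the Proj(init=...) form used above has been deprecated since pyproj 2.x; a hedged modern equivalent keeps the (lon, lat) ordering explicit:

from pyproj import Transformer

transformer = Transformer.from_crs("epsg:4326", "epsg:3857", always_xy=True)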
Example #26
def write_matsim_schedule(output_dir, schedule, epsg=''):
    fname = os.path.join(output_dir, "schedule.xml")
    if not epsg:
        epsg = schedule.epsg
    transformer = Transformer.from_proj(Proj('epsg:4326'), Proj(epsg))
    logging.info('Writing {}'.format(fname))

    # Also makes vehicles
    vehicles = {}

    with open(fname, "wb") as f, etree.xmlfile(f, encoding='utf-8') as xf:
        xf.write_declaration(
            doctype='<!DOCTYPE transitSchedule '
            'SYSTEM "http://www.matsim.org/files/dtd/transitSchedule_v2.dtd">')
        with xf.element("transitSchedule"):
            # transitStops first
            with xf.element("transitStops"):
                for stop_facility in schedule.stops():
                    transit_stop_attrib = {'id': str(stop_facility.id)}
                    if stop_facility.epsg == epsg:
                        x = stop_facility.x
                        y = stop_facility.y
                    else:
                        x, y = change_proj(x=stop_facility.lat,
                                           y=stop_facility.lon,
                                           crs_transformer=transformer)
                    transit_stop_attrib['x'], transit_stop_attrib['y'] = str(
                        x), str(y)
                    for k in ADDITIONAL_STOP_FACILITY_ATTRIBUTES:
                        if stop_facility.has_attrib(k):
                            transit_stop_attrib[k] = str(
                                stop_facility.additional_attribute(k))
                    xf.write(etree.Element("stopFacility",
                                           transit_stop_attrib))

            # minimalTransferTimes, if present
            if schedule.minimal_transfer_times:
                with xf.element("minimalTransferTimes"):
                    for stop_1_id, val in schedule.minimal_transfer_times.items(
                    ):
                        minimal_transfer_times_attribs = {
                            'fromStop': str(stop_1_id),
                            'toStop': str(val['stop']),
                            'transferTime': str(val['transferTime'])
                        }
                        xf.write(
                            etree.Element("relation",
                                          minimal_transfer_times_attribs))

                        minimal_transfer_times_attribs['fromStop'] = str(
                            val['stop'])
                        minimal_transfer_times_attribs['toStop'] = str(
                            stop_1_id)
                        xf.write(
                            etree.Element("relation",
                                          minimal_transfer_times_attribs))

            # transitLine
            v_id = 0  # generating some ids for vehicles
            for service_id, service in schedule.services.items():
                transit_line_attribs = {
                    'id': service_id,
                    'name': str(service.name)
                }

                with xf.element("transitLine", transit_line_attribs):
                    for id, route in service._routes.items():
                        transit_route_attribs = {'id': id}

                        with xf.element("transitRoute", transit_route_attribs):
                            rec = etree.Element("transportMode")
                            rec.text = route.mode
                            xf.write(rec)

                            with xf.element("routeProfile"):
                                for j in range(len(route.ordered_stops)):
                                    stop_attribs = {
                                        'refId': str(route.ordered_stops[j])
                                    }

                                    if not (route.departure_offsets
                                            and route.arrival_offsets):
                                        logging.warning(
                                            'The stop(s) along your route don\'t have arrival and departure offsets. '
                                            'This is likely a route with one stop - consider validating your schedule.'
                                        )
                                    else:
                                        if j == 0:
                                            stop_attribs['departureOffset'] = route.departure_offsets[j]
                                        elif j == len(route.ordered_stops) - 1:
                                            stop_attribs['arrivalOffset'] = route.arrival_offsets[j]
                                        else:
                                            stop_attribs['departureOffset'] = route.departure_offsets[j]
                                            stop_attribs['arrivalOffset'] = route.arrival_offsets[j]

                                        if route.await_departure:
                                            stop_attribs['awaitDeparture'] = str(route.await_departure[j]).lower()
                                    xf.write(
                                        etree.Element("stop", stop_attribs))

                            with xf.element("route"):
                                if not route.route:
                                    logging.warning(
                                        "Route needs to have a network route composed of a list of network links that "
                                        "the vehicle on this route traverses. If read the Schedule from GTFS, the "
                                        "resulting Route objects will not have reference to the network route taken."
                                    )
                                for link_id in route.route:
                                    route_attribs = {'refId': str(link_id)}
                                    xf.write(
                                        etree.Element("link", route_attribs))

                            with xf.element("departures"):
                                for trip_id, trip_dep in route.trips.items():
                                    vehicle_id = 'veh_{}_{}'.format(
                                        v_id, route.mode)
                                    trip_attribs = {
                                        'id': trip_id,
                                        'departureTime': trip_dep,
                                        'vehicleRefId': vehicle_id
                                    }
                                    vehicles[vehicle_id] = matsim_xml_values.MODE_DICT[route.mode]
                                    v_id += 1
                                    xf.write(
                                        etree.Element("departure",
                                                      trip_attribs))
    return vehicles
Example #27
def azimuthal_mask_wgs84(crs: CRS, resolution=64, threshold=1) -> MultiPolygon:
    """
    Generates a polygon that represents the shape of the `crs` in the WGS 84
    projection system.

    Points that exist within the polygon are points that can be mapped from the
    WGS 84 projection to the `crs`. Any point that is outside cannot be mapped
    between the `crs` and the WGS 84 projection.

    For example, with an orthographic projection, this function can be used as
    an easy way to clip a global polygon (like the world's coastlines) to fit
    within the actual orthographic projection.

    Todo: It is known that this algorithm isn't exactly foolproof.
    For example, +proj=ortho +lat_0=20 is known to produce some invalid edges.
    These are very rare edge cases, which we're ignoring for now, but we might
    need a better solution for later. One known solution is to actually make
    the whole mask consist of right angles, but this looks ugly when rendered.
    We could make this an optional argument if projections are still causing
    issues.

    :param crs: The coordinate reference system to create a mask for.
    :param resolution: The number of points to approximate 1/4th of the mask.
                       In other words, 1 will return a polygon with at least 4
                       coordinates, and 64 will return a polygon with at least
                       256 points.
    :param threshold: The maximum distance (measured in the `crs`'s units)
                      between the polygon's coordinates and the actual
                      boundary of the `crs`'s domain.
    :return: A polygon that approximates the `crs`'s domain in the WGS 84
             projection.
    """
    # Throw an error if an unknown projection is passed in.
    if not _is_supported_projection_method(crs):
        raise Exception('projection method name not supported: ' +
                        crs.coordinate_operation.method_name)

    proj_to_wgs84 = Transformer.from_proj(crs, CRS.from_epsg(4326))
    wgs84_to_proj = Transformer.from_proj(CRS.from_epsg(4326), crs)
    points = []
    for i in range(resolution * 4):
        angle = i * math.pi * 2 / (resolution * 4)
        radius = _binary_search_edge_crs(proj_to_wgs84, angle, threshold)
        position = proj_to_wgs84.transform(
            math.cos(angle) * radius,
            math.sin(angle) * radius)

        # If the longitude is essentially on the anti-meridian, or just past
        # it due to floating point error, snap it to 180/-180.
        touches_anti_meridian = math.isclose(
            position[1], 180 * _sign(position[1]),
            abs_tol=0.000000001) or position[1] > 180 or position[1] < -180
        if touches_anti_meridian:
            position = (position[0], 180 * _sign(position[1]))

        points.append(position)

    if _covers_hemisphere(wgs84_to_proj):
        # Stitch points together
        points = _split_points_along_anti_meridian(wgs84_to_proj, points)[0]
        # Used to determine which hemisphere the origin is in.
        origin = proj_to_wgs84.transform(0, 0)
        # Extend the path by including points at the North or South Pole.
        points.extend([
            (_sign(origin[0]) * 90, _sign(points[-1][1]) * 180),
            (_sign(origin[0]) * 90, _sign(points[-1][1]) * -180),
        ])

        return MultiPolygon([_trim_polygon(crs, Polygon(points))])
    else:
        point_groups = _split_points_along_anti_meridian(wgs84_to_proj, points)
        return MultiPolygon(
            [_trim_polygon(crs, Polygon(group)) for group in point_groups])
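
A minimal usage sketch (not part of the original example; the ortho proj string and the shapely calls are assumptions): build the mask for an orthographic CRS and clip a geometry to the visible hemisphere. Note that the mask above is built in (lat, lon) axis order.

from pyproj import CRS
from shapely.geometry import box

ortho = CRS.from_proj4('+proj=ortho +lat_0=0 +lon_0=0')
mask = azimuthal_mask_wgs84(ortho)
# box(minx, miny, maxx, maxy); interpreted here as (lat, lon) bounds
rect = box(-90.0, -180.0, 90.0, 180.0)
clipped = mask.intersection(rect)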
Example #28
def read_schedule(schedule_path, epsg):
    """
    Read MATSim schedule
    :param schedule_path: path to the schedule.xml file
    :param epsg: 'epsg:12345'
    :return: tuple of (list of Service objects, minimalTransferTimes dict)
    """
    services = []
    transformer = Transformer.from_proj(Proj(epsg),
                                        Proj('epsg:4326'),
                                        always_xy=True)

    def write_transitLinesTransitRoute(transitLine, transitRoutes,
                                       transportMode):
        mode = transportMode['transportMode']
        service_id = transitLine['transitLine']['id']
        service_routes = []
        for transitRoute, transitRoute_val in transitRoutes.items():
            stops = [
                Stop(s['stop']['refId'],
                     x=transit_stop_id_mapping[s['stop']['refId']]['x'],
                     y=transit_stop_id_mapping[s['stop']['refId']]['y'],
                     epsg=epsg,
                     transformer=transformer)
                for s in transitRoute_val['stops']
            ]
            for s in stops:
                s.add_additional_attributes(transit_stop_id_mapping[s.id])

            arrival_offsets = []
            departure_offsets = []
            await_departure = []
            for stop in transitRoute_val['stops']:
                if ('departureOffset' not in stop['stop']
                        and 'arrivalOffset' not in stop['stop']):
                    pass
                elif 'departureOffset' not in stop['stop']:
                    arrival_offsets.append(stop['stop']['arrivalOffset'])
                    departure_offsets.append(stop['stop']['arrivalOffset'])
                elif 'arrivalOffset' not in stop['stop']:
                    arrival_offsets.append(stop['stop']['departureOffset'])
                    departure_offsets.append(stop['stop']['departureOffset'])
                else:
                    arrival_offsets.append(stop['stop']['arrivalOffset'])
                    departure_offsets.append(stop['stop']['departureOffset'])

                if 'awaitDeparture' in stop['stop']:
                    await_departure.append(
                        str(stop['stop']['awaitDeparture']).lower() in
                        ['true', '1'])

            route = [
                r_val['link']['refId'] for r_val in transitRoute_val['links']
            ]

            trips = {
                'trip_id': [],
                'trip_departure_time': [],
                'vehicle_id': []
            }
            for dep in transitRoute_val['departure_list']:
                trips['trip_id'].append(dep['departure']['id'])
                trips['trip_departure_time'].append(
                    dep['departure']['departureTime'])
                trips['vehicle_id'].append(dep['departure']['vehicleRefId'])

            r = Route(route_short_name=transitLine['transitLine']['name'],
                      mode=mode,
                      stops=stops,
                      route=route,
                      trips=trips,
                      arrival_offsets=arrival_offsets,
                      departure_offsets=departure_offsets,
                      id=transitRoute,
                      await_departure=await_departure)
            service_routes.append(r)
        services.append(Service(id=service_id, routes=service_routes))

    transitLine = {}
    transitRoutes = {}
    transportMode = {}
    transitStops = {}
    transit_stop_id_mapping = {}
    is_minimalTransferTimes = False
    # {'stop_id_1': {'stop': 'stop_id_2', 'transferTime': 0.0}}
    minimalTransferTimes = {}

    # transitLines
    for event, elem in ET.iterparse(schedule_path, events=('start', 'end')):
        if event == 'start':
            if elem.tag == 'stopFacility':
                attribs = elem.attrib
                if attribs['id'] not in transitStops:
                    transitStops[attribs['id']] = attribs
                if attribs['id'] not in transit_stop_id_mapping:
                    transit_stop_id_mapping[attribs['id']] = elem.attrib
            if elem.tag == 'minimalTransferTimes':
                is_minimalTransferTimes = not is_minimalTransferTimes
            if elem.tag == 'relation':
                if is_minimalTransferTimes:
                    if elem.attrib['toStop'] not in minimalTransferTimes:
                        attribs = elem.attrib
                        minimalTransferTimes[attribs['fromStop']] = {
                            'stop': attribs['toStop'],
                            'transferTime': float(attribs['transferTime'])
                        }
            if elem.tag == 'transitLine':
                if transitLine:
                    write_transitLinesTransitRoute(transitLine, transitRoutes,
                                                   transportMode)
                transitLine = {"transitLine": elem.attrib}
                transitRoutes = {}

            if elem.tag == 'transitRoute':
                transitRoutes[elem.attrib['id']] = {
                    'stops': [],
                    'links': [],
                    'departure_list': [],
                    'attribs': elem.attrib
                }
                transitRoute = elem.attrib['id']

            # doesn't have any attribs
            # if elem.tag == 'routeProfile':
            #     routeProfile = {'routeProfile': elem.attrib}

            if elem.tag == 'stop':
                transitRoutes[transitRoute]['stops'].append(
                    {'stop': elem.attrib})

            # doesn't have any attribs
            # if elem.tag == 'route':
            #     route = {'route': elem.attrib}

            if elem.tag == 'link':
                transitRoutes[transitRoute]['links'].append(
                    {'link': elem.attrib})

            # doesn't have any attribs
            # if elem.tag == 'departures':
            #     departures = {'departures': elem.attrib}

            if elem.tag == 'departure':
                transitRoutes[transitRoute]['departure_list'].append(
                    {'departure': elem.attrib})
        elif (event == 'end') and (elem.tag == "transportMode"):
            transportMode = {'transportMode': elem.text}

    # add the last one
    write_transitLinesTransitRoute(transitLine, transitRoutes, transportMode)

    return services, minimalTransferTimes
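
A usage sketch (the file path and EPSG code are placeholders, and the `id` attribute access assumes Service keeps the id it was constructed with):

services, minimal_transfer_times = read_schedule('schedule.xml', 'epsg:27700')
for service in services:
    print(service.id)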
Example #29
    def _adjust_grid_from_points(self,
                                 grid_res=None,
                                 to_crs=None,
                                 order=2,
                                 method='linear',
                                 digits=2,
                                 min_val=0):
        """
        Method to adjust a grid by forcing it through point data.

        Parameters
        ----------
        grid_res : int, float, or None
            The resulting grid resolution in the unit of the final projection (usually meters or decimal degrees).
        to_crs : int or str or None
            The projection for the output data similar to from_crs.
        order : int
            The order of the spline interpolation, default is 2. The order has to be in the range 0-5. An order of 1 is linear interpolation.
        method : str
            The scipy griddata interpolation method to be applied. Options are 'nearest', 'linear', and 'cubic'. See `<https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html>`_ for more details.
        digits : int
            The number of digits to round the output.
        min_val : int, float, or None
            The minimum value for the results. All results below min_val will be assigned min_val.

        Returns
        -------
        Dataset
        """
        ## Resample the grid if needed
        if isinstance(grid_res, (int, float)):
            grid1 = self.grid_to_grid(grid_res,
                                      to_crs=to_crs,
                                      order=order,
                                      digits=digits,
                                      min_val=min_val)
        else:
            grid1 = self.grid_data

        ## grid to points
        point_data = self.point_data.copy()
        sites1 = point_data[['x', 'y']].drop_duplicates().copy()

        pts1 = self.grid_to_points(sites1,
                                   to_crs=to_crs,
                                   order=order,
                                   digits=digits,
                                   min_val=min_val)

        ## Subtract the original points from the new points
        pts2 = pts1.dropna().reset_index().copy()
        pts2['x'] = pts2['x'].round().astype(int)
        pts2['y'] = pts2['y'].round().astype(int)

        # Convert crs if needed
        if (to_crs is not None) and (to_crs != self._point_crs):
            to_crs1 = Proj(CRS.from_user_input(to_crs))
            trans1 = Transformer.from_proj(self._point_crs, to_crs1)
            points = np.array(
                trans1.transform(*point_data[['x', 'y']].values.T))
            point_data['x'] = points[0]
            point_data['y'] = points[1]

        both1 = pd.merge(point_data,
                         pts2.rename(columns={'precip': 'grid_precip'}),
                         on=['x', 'y', 'time'])

        both1['ratio'] = both1['precip'] / both1['grid_precip']
        self.ratio_precip = both1

        ## Create the bounding box for the new grid
        min_p = sites1.min(0)
        max_p = sites1.max(0)

        min_lon = util.find_nearest(grid1.x, min_p[0])
        max_lon = util.find_nearest(grid1.x, max_p[0])
        min_lat = util.find_nearest(grid1.y, min_p[1])
        max_lat = util.find_nearest(grid1.y, max_p[1])

        ## Points to grid
        ratio_grid = interp2d.points_to_grid(
            both1[['time', 'x', 'y', 'ratio']], 'time', 'x', 'y', 'ratio',
            grid_res, to_crs, None, (min_lon, max_lon, min_lat, max_lat),
            method, 'nearest')

        ## Multiply the original grid by the ratio grid
        grid2 = xr.merge([grid1, ratio_grid], join='inner')
        grid3 = grid2['precip'] * grid2['ratio']
        grid3.name = 'precip'
        if isinstance(min_val, (int, float)):
            grid3 = xr.where(grid3 < min_val, min_val, grid3).copy()

        ## Return
        return grid3.to_dataset()
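
The ratio adjustment above can be illustrated with a toy example (a sketch, not from the original class):

import numpy as np

grid_precip = np.array([4.0, 2.0])   # grid values sampled at the gauge sites
gauge_precip = np.array([5.0, 1.0])  # values observed at the gauges
ratio = gauge_precip / grid_precip   # [1.25, 0.5]
# These ratios are interpolated onto the grid and multiplied in, forcing the
# adjusted surface through the observed point values.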
Example #30
def points_to_grid(df,
                   time_name,
                   x_name,
                   y_name,
                   data_name,
                   grid_res,
                   from_crs,
                   to_crs=None,
                   bbox=None,
                   method='linear',
                   extrapolation='constant',
                   fill_val=np.nan,
                   digits=2,
                   min_val=None):
    """
    Function to take a dataframe of point value inputs (df) and interpolate to a grid. Uses the scipy griddata function for interpolation.

    Parameters
    ----------
    df : DataFrame
        A pandas DataFrame containing four columns as shown in the below parameters.
    time_name : str
        If grid is a DataFrame, then time_name is the time column name. If grid is a Dataset, then time_name is the time coordinate name.
    x_name : str
        If grid is a DataFrame, then x_name is the x column name. If grid is a Dataset, then x_name is the x coordinate name.
    y_name : str
        If grid is a DataFrame, then y_name is the y column name. If grid is a Dataset, then y_name is the y coordinate name.
    data_name : str
        If grid is a DataFrame, then data_name is the data column name. If grid is a Dataset, then data_name is the data variable name.
    grid_res : int or float
        The resulting grid resolution in the unit of the final projection (usually meters or decimal degrees).
    from_crs : int or str or None
        The projection info for the input data if the result should be reprojected to the to_crs projection (either a proj4 str or epsg int).
    to_crs : int or str or None
        The projection for the output data similar to from_crs.
    bbox : tuple of int or float
        The bounding box for the output interpolation in the to_crs projection. None will return a similar grid extent as the input. The tuple should contain four ints or floats in the following order: (x_min, x_max, y_min, y_max)
    method : str
        The scipy griddata interpolation method to be applied. Options are 'nearest', 'linear', and 'cubic'. See `<https://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html>`_ for more details.
    extrapolation : str
        Either 'constant' or 'nearest'.
    fill_val : int or float
        If 'constant' if passed to the extrapolation parameter, fill_val assigns the value outside of the boundary. Defaults to numpy.nan.
    digits : int
        The number of digits to round the output.
    min_val : int, float, or None
        The minimum value for the results. All results below min_val will be assigned min_val.

    Returns
    -------
    xarray Dataset
    """
    print('Prepare input and output data')
    if (to_crs == 4326) or ((from_crs == 4326) and (to_crs is None)):
        output_digits = 4
    else:
        output_digits = 0

    ### Prepare input data
    df2 = df.copy()

    time1 = pd.to_datetime(df2[time_name].sort_values().unique())

    ### Convert input data to crs of points shp and create input xy
    from_crs1 = Proj(CRS.from_user_input(from_crs))
    xy1 = np.array(list(zip(df2[y_name], df2[x_name]))).T
    if isinstance(to_crs, (str, int)):
        to_crs1 = Proj(CRS.from_user_input(to_crs))
        trans1 = Transformer.from_proj(from_crs1, to_crs1)
        xy1 = np.array(trans1.transform(*xy1))
        df2[x_name] = xy1[1]
        df2[y_name] = xy1[0]

    ### Prepare output data
    if isinstance(bbox, tuple):
        out_x_min, out_x_max, out_y_min, out_y_max = bbox
    else:
        out_y_min, out_x_min = xy1.min(1).round(output_digits)
        out_y_max, out_x_max = xy1.max(1).round(output_digits)

    new_x = np.arange(out_x_min, out_x_max, grid_res)
    new_y = np.arange(out_y_min, out_y_max, grid_res)

    xy_out = np.dstack(np.meshgrid(new_x, new_y)).reshape(-1, 2)

    ### Run interpolations
    print('Run interpolations...')
    arr2 = np.zeros((len(time1), xy_out.shape[0]), df2[data_name].dtype)
    index1 = {time1[i]: i for i in np.arange(len(time1))}

    for name, group in df2.groupby(time_name):
        print(name)
        i = index1[name]
        xy = group[[x_name, y_name]].values
        arr2[i] = griddata(xy,
                           group[data_name].values,
                           xy_out,
                           method=method,
                           fill_value=fill_val).round(digits)
        if extrapolation == 'nearest':
            nan_index = np.isnan(arr2[i])
            nan_xy = xy_out[nan_index]
            nonnan_values = arr2[i][~nan_index]
            nonnan_xy = xy_out[~nan_index]
            arr2[i][nan_index] = griddata(nonnan_xy,
                                          nonnan_values,
                                          nan_xy,
                                          method='nearest').round(digits)


    ### Reshape and package data
    print('Packaging up the output')
    arr2 = arr2.reshape((len(time1), len(new_y), len(new_x))).round(digits)

    if isinstance(min_val, (int, float)):
        arr2[arr2 < min_val] = min_val

    new_ds = xr.DataArray(arr2,
                          coords=[time1, new_y, new_x],
                          dims=['time', 'y', 'x'],
                          name=data_name).to_dataset()

    return new_ds
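
A usage sketch (the column values and the EPSG code are made up for illustration; 'nearest' avoids the triangulation that 'linear' would need for so few points):

import pandas as pd

df_in = pd.DataFrame({
    'time': pd.to_datetime(['2020-01-01'] * 3),
    'x': [170000.0, 171500.0, 172000.0],
    'y': [5800000.0, 5801000.0, 5803000.0],
    'precip': [1.2, 3.4, 0.0],
})
ds = points_to_grid(df_in, 'time', 'x', 'y', 'precip',
                    grid_res=500, from_crs=2193, method='nearest')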
Example #31
File: osm.py  Project: luftj/MaRE
import json
from json.decoder import JSONDecodeError
import os
import cv2
import numpy as np
from time import sleep
import requests
import logging
from osmtogeojson import osmtogeojson
from pyproj import Transformer

from config import path_osm, proj_map, proj_osm, proj_sheets, osm_query, force_osm_download, osm_url, draw_ocean_polygon, download_timeout, fill_polys

transform_osm_to_map = Transformer.from_proj(proj_osm,
                                             proj_map,
                                             skip_equivalent=True,
                                             always_xy=True)
transform_sheet_to_osm = Transformer.from_proj(proj_sheets,
                                               proj_osm,
                                               skip_equivalent=True,
                                               always_xy=True)
transform_sheet_to_map = Transformer.from_proj(proj_sheets,
                                               proj_map,
                                               skip_equivalent=True,
                                               always_xy=True)
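
# Usage sketch (coordinates are placeholders; the actual CRS definitions live
# in config.py). Assuming proj_osm is geographic, always_xy=True means the
# call takes (lon, lat):
# x_map, y_map = transform_osm_to_map.transform(16.5, 54.3)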


def get_from_osm(bbox=[16.3, 54.25, 16.834, 54.5], url=osm_url):
    os.makedirs(path_osm, exist_ok=True)
    data_path = path_osm + "rivers_%s.geojson" % "_".join(map(str, bbox))
Example #32
def test_2d_with_time_transform_crs_obs2():
    transformer = Transformer.from_proj(4896, 7930)
    assert_almost_equal(
        transformer.transform(xx=3496737.2679, yy=743254.4507, tt=2019.0),
        (3496737.4105305015, 743254.1014318303, 2019.0),
    )
Example #33
import math

from pymodis import downmodis
from pyproj import Proj, Transformer

VERTICAL_TILES = 18
HORIZONTAL_TILES = 36
EARTH_RADIUS = 6371007.181
EARTH_WIDTH = 2 * math.pi * EARTH_RADIUS

TILE_WIDTH = EARTH_WIDTH / HORIZONTAL_TILES
TILE_HEIGHT = TILE_WIDTH

wgs84_proj = Proj('EPSG:4326')
modis_grid = Proj(f'+proj=sinu +R={EARTH_RADIUS} +nadgrids=@null '
                  f'+ellps=WGS84 +wktext')

transformer = Transformer.from_proj(wgs84_proj, modis_grid, always_xy=True)


def wgs84_to_modis_tile(xx, yy):
    x, y = transformer.transform(xx, yy)
    h = (EARTH_WIDTH * .5 + x) / TILE_WIDTH
    v = (VERTICAL_TILES * TILE_HEIGHT - y - EARTH_WIDTH * .25) / TILE_HEIGHT
    return f'h{int(h)}v{int(v):02},'
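
# Worked example (an illustration, not from the original source): for
# lon=127.551270, lat=35.793457 (the call further down), the sinusoidal grid
# puts the point at h of about 28.3 and v of about 5.4, so the function
# returns 'h28v05,'; the trailing comma suits downmodis' comma-separated
# tile list.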


modisOgg = downmodis.downModis(
    user='******',
    password='******',
    destinationFolder='/home/artem/Code/ndvi-test/modis_data',
    tiles=wgs84_to_modis_tile(127.551270, 35.793457),
    product='MOD09GA.006',
Example #34
def test_transform_exception():
    transformer = Transformer.from_proj("+init=epsg:4326", "+init=epsg:27700")
    with pytest.raises(ProjError):
        transformer.transform(100000, 100000, errcheck=True)
Example #36
def test_transform_no_exception():
    # issue 249
    transformer = Transformer.from_proj("+init=epsg:4326", "+init=epsg:27700")
    transformer.transform(1.716073972, 52.658007833, errcheck=True)
    transformer.itransform([(1.716073972, 52.658007833)], errcheck=True)
Example #37
def test_transform_honours_input_types(x, y, z):
    # 622
    transformer = Transformer.from_proj(4896, 4896)
    assert transformer.transform(xx=x, yy=y, zz=z) == (x, y, z)
Example #38
def visibility_processing(radar,
                          dem_grid,
                          frequency,
                          beamwidth,
                          fill_value=None,
                          terrain_altitude_field=None,
                          bent_terrain_altitude_field=None,
                          terrain_slope_field=None,
                          terrain_aspect_field=None,
                          theta_angle_field=None,
                          visibility_field=None,
                          min_vis_altitude_field=None,
                          min_vis_theta_field=None,
                          incident_angle_field=None,
                          sigma_0_field=None,
                          ke=4 / 3.,
                          quad_pts_range=1,
                          quad_pts_az=9,
                          quad_pts_el=9,
                          parallel=True):

    # parse fill value
    if fill_value is None:
        fill_value = get_fillvalue()

    # parse field names
    if terrain_altitude_field is None:
        terrain_altitude_field = get_field_name('terrain_altitude')
    if bent_terrain_altitude_field is None:
        bent_terrain_altitude_field = get_field_name('bent_terrain_altitude')
    if terrain_slope_field is None:
        terrain_slope_field = get_field_name('terrain_slope')
    if terrain_aspect_field is None:
        terrain_aspect_field = get_field_name('terrain_aspect')
    if theta_angle_field is None:
        theta_angle_field = get_field_name('theta_angle')
    if visibility_field is None:
        visibility_field = get_field_name('visibility')
    if min_vis_altitude_field is None:
        min_vis_altitude_field = get_field_name('min_vis_altitude')
    if min_vis_theta_field is None:
        # parsed like the other fields; the key is assumed by analogy
        min_vis_theta_field = get_field_name('min_vis_theta')
    if incident_angle_field is None:
        incident_angle_field = get_field_name('incident_angle')
    if sigma_0_field is None:
        sigma_0_field = get_field_name('sigma_0')

    # Define aeqd projection for radar local Cartesian coords
    pargs = Proj(proj="aeqd",
                 lat_0=radar.latitude['data'][0],
                 lon_0=radar.longitude['data'][0],
                 datum="WGS84",
                 units="m")

    # Define coordinate transform: (local radar Cart coords) -> (DEM coords)
    global transformer
    transformer = Transformer.from_proj(pargs, dem_grid.projection)

    # Get quadrature pts and weights in az,el directions
    quad_pts, weights = _GH_quadrature(quad_pts_el, quad_pts_az, beamwidth)

    # Create grid interpolator
    global nngrid
    nngrid = _IndexNNGrid(dem_grid.x['data'], dem_grid.y['data'])

    # Initialize output fields

    nazimuth = len(radar.azimuth['data'])
    ngates = len(radar.range['data'])

    visibility = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    theta = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    dem_bent = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    terrain_slope = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    terrain_aspect = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    min_vis_theta = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    min_vis_altitude = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    incident_angle = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)
    sigma_0 = np.ma.zeros([nazimuth, ngates], fill_value=fill_value)

    dem_data = dem_grid.fields[terrain_altitude_field]['data']
    # In the first step we estimate the variables at topography level but
    # only over the radar coordinates
    for i, az in enumerate(radar.get_azimuth(0)):  # Loop on az angles

        # Get radar Cartesian coords at elevation 0
        xr, yr, zr = rad_to_cart(radar.range['data'], az, 0, ke=ke)
        # Add radar altitude to z coordinates
        zr = zr[0] + radar.altitude['data']

        # Project to DEM coordinates and get corresponding grid indexes
        xr_proj, yr_proj = transformer.transform(xr, yr)
        i_idx, j_idx = nngrid.get_idx(xr_proj, yr_proj)

        # Create ray and compute relevant variables
        ray = _Ray(i_idx, j_idx, dem_data)
        ray.calc_dem_bent(dem_data, zr)
        ray.calc_slope_and_aspect(dem_data, dem_grid.metadata['resolution'])
        ray.calc_theta(radar.range['data'])
        ray.calc_min_theta()
        ray.calc_min_vis_alt(radar.range['data'], beamwidth, ke)
        ray.calc_incident_ang()
        ray.calc_sigma_0(frequency)

        theta[i] = ray.theta
        min_vis_theta[i] = ray.min_vis_theta
        min_vis_altitude[i] = ray.min_vis_alt
        dem_bent[i] = ray.dem_bent
        terrain_slope[i] = ray.slope
        terrain_aspect[i] = ray.aspect
        sigma_0[i] = ray.sigma_0
        incident_angle[i] = ray.incident_ang

    theta_dic = get_metadata(theta_angle_field)
    theta_dic['data'] = theta

    slope_dic = get_metadata(terrain_slope_field)
    slope_dic['data'] = terrain_slope

    aspect_dic = get_metadata(terrain_aspect_field)
    aspect_dic['data'] = terrain_aspect

    min_vis_theta_dic = get_metadata(min_vis_theta_field)
    min_vis_theta_dic['data'] = min_vis_theta

    min_vis_altitude_dic = get_metadata(min_vis_altitude_field)
    min_vis_altitude_dic['data'] = min_vis_altitude

    bent_terrain_altitude_dic = get_metadata(bent_terrain_altitude_field)
    bent_terrain_altitude_dic['data'] = dem_bent

    incident_angle_dic = get_metadata(incident_angle_field)
    incident_angle_dic['data'] = incident_angle

    sigma_0_dic = get_metadata(sigma_0_field)
    sigma_0_dic['data'] = sigma_0

    if quad_pts_range >= 3:
        """ 
        Interpolate range array based on how many quadrature points in range
        are wanted (at least 3)
        For example if quad_pts_range = 5 and original range array is 
        [25,75,125] (bin centers), it will give [0,25,50,50,75,100,125,150]
        i.e. bin 0 - 50 (with center 25) is decomposed into ranges [0,25,50]
        """
        ranges = radar.range['data']
        nrange = len(radar.range['data'])
        dr = np.mean(np.diff(radar.range['data']))  # range res
        intervals = np.arange(ranges[0] - dr / 2, dr * nrange + dr / 2, dr)
        range_resampled = []
        for i in range(len(intervals) - 1):
            range_resampled.extend(
                np.linspace(intervals[i], intervals[i + 1], quad_pts_range))
    else:
        # No resampling
        range_resampled = radar.range['data']

    # # create parallel computing instance
    # if parallel:
    #     pool = mp.Pool(processes=mp.cpu_count(), maxtasksperchild=1)
    #     map_ = pool.map
    # else:
    #     map_ = map

    # # Loop on fixed angles : el for ppi, az for rhi
    # idx = 0
    # for i, fixangle in enumerate(radar.fixed_angle['data']):
    #     # Create partial worker func that takes only angle as input
    #     if radar.scan_type == 'ppi':
    #         angles = list((zip(radar.get_azimuth(i), repeat(fixangle))))
    #     elif radar.scan_type == 'rhi':
    #         angles = list((zip(repeat(fixangle), radar.get_elevation(i))))

    #     partialworker = partial(_worker_function,
    #                             ranges = range_resampled,
    #                             rad_alt = radar.altitude['data'],
    #                             dem_data = dem_data,
    #                             ke = ke,
    #                             quad_pts_range = quad_pts_range,
    #                             quad_pts_GH = quad_pts,
    #                             weights = weights)

    #     results = list(map_(partialworker, angles))
    #     visibility[idx : idx + len(results), :] = results
    #     idx += len(results)

    #     if parallel:
    #         pool.close()
    #         pool.join()

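    # NOTE: with the parallel block above commented out, `visibility` is
    # returned as the zero-initialised array created earlier.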
    visibility_dic = get_metadata(visibility_field)
    visibility_dic['data'] = visibility

    return (bent_terrain_altitude_dic, slope_dic, aspect_dic, theta_dic,
            min_vis_theta_dic, min_vis_altitude_dic, visibility_dic,
            incident_angle_dic, sigma_0_dic)