def crs_to_proj_dict(crs: CRS) -> dict:
    # pyproj warns when converting a CRS to a PROJ dict is lossy; silence
    # that warning since the dict is only used as a compact representation.
    warnings.filterwarnings("ignore", module="pyproj", category=UserWarning)
    if crs.to_epsg() is not None:
        # Prefer the EPSG code when pyproj can identify one for this CRS
        proj_dict = {"EPSG": crs.to_epsg()}
    else:
        # Fall back to the full PROJ parameter dict
        proj_dict = crs.to_dict()
    _remove_unnecessary_proj_params(proj_dict)
    return proj_dict
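A minimal usage sketch; it assumes the snippet's imports (warnings, pyproj's CRS) and its module-private helper _remove_unnecessary_proj_params are in scope:

from pyproj import CRS

# An EPSG-identifiable CRS collapses to its code...
crs_to_proj_dict(CRS.from_epsg(4326))  # {"EPSG": 4326}

# ...while a CRS that pyproj cannot match to an EPSG code falls back
# to the full PROJ parameter dict.
crs_to_proj_dict(CRS("+proj=omerc +lat_0=50 +alpha=45 +ellps=WGS84"))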
Example 2
def df_coord_transfor(gdf):
    # Coordinate transformation: reproject to UTM, then apply the SUMO
    # network offset from the <location> element below:
    # <location netOffset="-799385.77,-2493897.75" convBoundary="0.00,0.00,18009.61,5593.04" origBoundary="113.832744,22.506539,114.086290,22.692155" projParameter="+proj=utm +zone=49 +ellps=WGS84 +datum=WGS84 +units=m +no_defs"/>

    crs = CRS("+proj=utm +zone=49 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
    to_crs = crs.to_epsg()

    gdf.to_crs(epsg=to_crs, inplace=True)

    gdf.loc[:, "x_"] = gdf.geometry.x - 799385.77
    gdf.loc[:, "y_"] = gdf.geometry.y - 2493897.75

    return gdf
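A minimal sketch of how this might be called; the GeoDataFrame contents and its starting CRS here are illustrative assumptions. Note that to_epsg() can return None if pyproj cannot confidently match the proj string, in which case to_crs(epsg=None) would fail:

import geopandas as gpd
from shapely.geometry import Point

# Two points inside the network's original WGS84 bounding box
gdf = gpd.GeoDataFrame(
    {"id": [1, 2]},
    geometry=[Point(113.9, 22.55), Point(114.0, 22.6)],
    crs="EPSG:4326",
)
gdf = df_coord_transfor(gdf)
print(gdf[["x_", "y_"]])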
Example 3
    def records_intersecting(
            self,
            geometry: BaseGeometry,
            crs: CRS = None,
            geometry_fields: List[str] = None) -> List[Dict[str, Any]]:
        if crs is None:
            crs = self.crs

        if crs.to_epsg() is None:
            raise NotImplementedError(f'no EPSG code found for CRS "{crs}"')

        if geometry_fields is None or len(geometry_fields) == 0:
            geometry_fields = list(self.geometry_fields)

        where_clause = []
        where_values = []
        for field in geometry_fields:
            where_values.extend([geometry.wkt, crs.to_epsg()])
            geometry_string = 'ST_GeomFromText(%s, %s)'
            if crs != self.crs:
                geometry_string = f'ST_Transform({geometry_string}, %s)'
                where_values.append(self.crs.to_epsg())
            where_clause.append(f'ST_Intersects({field}, {geometry_string})')
        where_clause = ' OR '.join(where_clause)

        with self.connection as connection:
            with connection.cursor() as cursor:
                cursor.execute(
                    f'SELECT * FROM {self.name} WHERE {where_clause};',
                    where_values)
                records = cursor.fetchall()
        connection.close()

        return [
            parse_record_values(dict(zip(self.fields.keys(), record)),
                                self.fields) for record in records
        ]
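A hedged usage sketch; the PostGISTable wrapper is hypothetical and stands in for whatever class owns this method along with its crs, fields, and psycopg2-style connection attributes:

from pyproj import CRS
from shapely.geometry import box

table = PostGISTable(...)  # hypothetical owner of this method
# Records intersecting a WGS84 bounding box; the method transforms the
# query geometry server-side if the table CRS differs.
records = table.records_intersecting(
    box(-75.0, 39.0, -74.0, 40.0),
    crs=CRS.from_epsg(4326),
)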
Example 4
class Projection:
    def __init__(self,
                 name,
                 output_crs_name=None,
                 manual_first_axis_direction=None):
        self.name = name

        try:
            self.crs = CRS(name)
        except pyproj.exceptions.CRSError:
            if name == "CRS:84":
                # pyproj rejects the WMS "CRS:84" identifier; it is
                # EPSG:4326 geographic coordinates with lon/lat axis order.
                self.crs = CRS("EPSG:4326")
            else:
                # Re-raise rather than silently leaving self.crs unset
                raise

        self.manual_first_axis_direction = manual_first_axis_direction
        self.from_wgs84_transformer = Transformer.from_crs(
            "EPSG:4326", self.crs)

        if output_crs_name:
            self.output_crs = CRS(output_crs_name)
            self.output_transform = Transformer.from_crs(
                self.crs, self.output_crs, always_xy=True).transform
        else:
            self.output_crs = None
            self.output_transform = None

    def is_projected(self):
        return self.crs.is_projected

    def get_coordinate_unit(self):
        return self.crs.axis_info[0].unit_name

    def get_epsg(self):
        return self.crs.to_epsg()

    def convert_from_wgs84(self, lon, lat):
        return self.from_wgs84_transformer.transform(lat, lon)

    def is_first_axis_east(self):
        direction = (self.manual_first_axis_direction
                     or self.crs.axis_info[0].direction)
        return direction.lower() == 'east'
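A short usage sketch (the CRS codes are arbitrary examples; pyproj's CRS and Transformer are assumed imported as in the snippet):

proj = Projection("EPSG:32649", output_crs_name="EPSG:4326")
print(proj.is_projected())          # True: UTM zone 49N is projected
print(proj.get_coordinate_unit())   # 'metre'
print(proj.get_epsg())              # 32649

# convert_from_wgs84 takes (lon, lat) but passes (lat, lon) to the
# transformer, matching EPSG:4326's latitude-first axis order.
x, y = proj.convert_from_wgs84(114.0, 22.6)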
Example 5
def georef_by_worker(sv_corr: list,
                     alt: xr.DataArray,
                     lon: xr.DataArray,
                     lat: xr.DataArray,
                     hdng: xr.DataArray,
                     heave: xr.DataArray,
                     wline: float,
                     vert_ref: str,
                     input_crs: CRS,
                     horizontal_crs: CRS,
                     z_offset: float,
                     vdatum_directory: str = None):
    """
    Use the raw attitude/navigation to transform the vessel-relative along/across/down offsets to georeferenced
    soundings.  Supports transformation to geographic and projected coordinate systems, with a vertical
    reference that you select.

    Parameters
    ----------
    sv_corr
        [x, y, z] offsets generated with sv_correct
    alt
        1d (time) altitude in meters
    lon
        1d (time) longitude in degrees
    lat
        1d (time) latitude in degrees
    hdng
        1d (time) heading in degrees
    heave
        1d (time) heave in meters
    wline
        waterline offset from reference point
    vert_ref
        vertical reference point, one of ['ellipse', 'vessel', 'waterline']
    input_crs
        pyproj CRS object, input coordinate reference system information for this run
    horizontal_crs
        pyproj CRS object, destination coordinate reference system information for this run
    z_offset
        lever arm from reference point to transmitter
    vdatum_directory
        if 'NOAA MLLW' or 'NOAA MHW' is the vertical reference, a path to the VDatum directory is required here

    Returns
    -------
    list
        [xr.DataArray alongtrack offset (time, beam), xr.DataArray acrosstrack offset (time, beam),
         xr.DataArray down offset (time, beam), xr.DataArray corrected heave for TX - RP lever arm, all zeros if in 'ellipse' mode (time),
         xr.DataArray corrected altitude for TX - RP lever arm, all zeros if in 'vessel' or 'waterline' mode (time)]
    """
    g = horizontal_crs.get_geod()

    # unpack the sv corrected data output
    alongtrack = sv_corr[0]
    acrosstrack = sv_corr[1]
    depthoffset = sv_corr[2] + z_offset
    # generate the corrected depth offset depending on the desired vertical reference
    corr_dpth = None
    corr_heave = None
    corr_altitude = None
    if vert_ref in kluster_variables.ellipse_based_vertical_references:
        corr_altitude = alt
        corr_heave = xr.zeros_like(corr_altitude)
        corr_dpth = (depthoffset - corr_altitude.values[:, None]).astype(
            np.float32)
    elif vert_ref == 'vessel':
        corr_heave = heave
        corr_altitude = xr.zeros_like(corr_heave)
        corr_dpth = (depthoffset + corr_heave.values[:, None]).astype(
            np.float32)
    elif vert_ref == 'waterline':
        corr_heave = heave
        corr_altitude = xr.zeros_like(corr_heave)
        corr_dpth = (depthoffset + corr_heave.values[:, None] - wline).astype(
            np.float32)

    # get the sv corrected alongtrack/acrosstrack offsets stacked without the NaNs (arrays have NaNs for beams that do not exist in that sector)
    at_idx, alongtrack_stck = stack_nan_array(alongtrack,
                                              stack_dims=('time', 'beam'))
    ac_idx, acrosstrack_stck = stack_nan_array(acrosstrack,
                                               stack_dims=('time', 'beam'))

    # determine the beam wise offsets
    bm_azimuth = np.rad2deg(np.arctan2(acrosstrack_stck,
                                       alongtrack_stck)) + np.float32(
                                           hdng[at_idx[0]].values)
    bm_radius = np.sqrt(acrosstrack_stck**2 + alongtrack_stck**2)
    pos = g.fwd(lon[at_idx[0]].values, lat[at_idx[0]].values,
                bm_azimuth.values, bm_radius.values)

    z = np.around(corr_dpth, 3)
    if vert_ref == 'NOAA MLLW':
        sep, vdatum_unc = transform_vyperdatum(
            pos[0],
            pos[1],
            xr.zeros_like(z),
            input_crs.to_epsg(),
            'mllw',
            vdatum_directory=vdatum_directory)
    elif vert_ref == 'NOAA MHW':
        sep, vdatum_unc = transform_vyperdatum(
            pos[0],
            pos[1],
            xr.zeros_like(z),
            input_crs.to_epsg(),
            'mhw',
            vdatum_directory=vdatum_directory)
    else:
        sep = 0
        vdatum_unc = xr.zeros_like(z)
    z = z - sep

    if horizontal_crs.is_projected:
        # Transformer.transform input order is based on the CRS, see CRS.geodetic_crs.axis_info
        # - lon, lat - this appears to be valid when using CRS from proj4 string
        # - lat, lon - this appears to be valid when using CRS from epsg
        # use the always_xy option to force the transform to expect lon/lat order
        georef_transformer = Transformer.from_crs(input_crs,
                                                  horizontal_crs,
                                                  always_xy=True)
        newpos = georef_transformer.transform(
            pos[0], pos[1], errcheck=True)  # longitude / latitude order (x/y)
    else:
        newpos = pos

    x = reform_nan_array(np.around(newpos[0], 3), at_idx, alongtrack.shape,
                         alongtrack.coords, alongtrack.dims)
    y = reform_nan_array(np.around(newpos[1], 3), ac_idx, acrosstrack.shape,
                         acrosstrack.coords, acrosstrack.dims)

    return [x, y, z, corr_heave, corr_altitude, vdatum_unc]
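The heart of the horizontal step above is polar: each beam's azimuth is the arctangent of acrosstrack over alongtrack plus the vessel heading, its radius is the Euclidean offset distance, and Geod.fwd walks that vector out from the navigation position along the ellipsoid. A stripped-down sketch of that idea with plain numpy arrays (all values illustrative):

import numpy as np
from pyproj import CRS

geod = CRS.from_epsg(32649).get_geod()

lon, lat, heading = 114.0, 22.6, 90.0     # vessel position and heading
alongtrack = np.array([5.0, 5.0])         # meters ahead of the reference
acrosstrack = np.array([-20.0, 20.0])     # meters to port/starboard

azimuth = np.rad2deg(np.arctan2(acrosstrack, alongtrack)) + heading
radius = np.sqrt(acrosstrack ** 2 + alongtrack ** 2)

# Geod.fwd returns (lons, lats, back_azimuths) for the offset endpoints
beam_lon, beam_lat, _ = geod.fwd(
    np.full_like(azimuth, lon), np.full_like(azimuth, lat), azimuth, radius)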
Example 6
def CRS_to_uri(crs: CRS) -> str:
    """Convert CRS to URI."""
    epsg_code = crs.to_epsg()
    # to_epsg() returns None when no EPSG code matches, which would yield
    # an invalid ".../EPSG/0/None" URI; callers should check beforehand.
    return f"http://www.opengis.net/def/crs/EPSG/0/{epsg_code}"
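For example:

from pyproj import CRS

CRS_to_uri(CRS.from_epsg(4326))
# 'http://www.opengis.net/def/crs/EPSG/0/4326'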
Example 7
    def records_intersecting(
            self,
            geometry: BaseGeometry,
            crs: CRS = None,
            geometry_fields: List[str] = None) -> List[Dict[str, Any]]:
        if crs is None:
            crs = self.crs

        if crs.to_epsg() is None:
            raise NotImplementedError(f'no EPSG code found for CRS "{crs}"')

        if geometry_fields is None or len(geometry_fields) == 0:
            geometry_fields = list(self.geometry_fields)

        where_clause = []
        where_values = []
        for field in geometry_fields:
            where_values.extend([geometry.wkt, crs.to_epsg()])
            geometry_string = 'GeomFromText(?, ?)'
            if crs != self.crs:
                geometry_string = f'Transform({geometry_string}, ?)'
                where_values.append(self.crs.to_epsg())
            where_clause.append(f'Intersects({field}, {geometry_string})')
        where_clause = ' OR '.join(where_clause)

        non_geometry_fields = {
            field: field_type
            for field, field_type in self.fields.items()
            if field_type.__name__ not in GEOMETRY_TYPES
        }

        with self.connection:
            cursor = self.connection.cursor()
            cursor.execute(
                f'SELECT {", ".join(non_geometry_fields)} '
                f'FROM {self.name} WHERE {where_clause};',
                where_values,
            )
            non_geometry_records = cursor.fetchall()
            non_geometry_records = [
                parse_record_values(
                    dict(zip(non_geometry_fields.keys(), record)),
                    non_geometry_fields) for record in non_geometry_records
            ]

            geometry_field_string = ', '.join(
                f'asbinary({geometry_field})'
                for geometry_field in self.geometry_fields)
            cursor.execute(
                f'SELECT {geometry_field_string} '
                f'FROM {self.name} WHERE {where_clause};',
                where_values,
            )
            geometry_records = cursor.fetchall()
            geometry_records = [
                parse_record_values(
                    dict(zip(self.geometry_fields.keys(), record)),
                    self.geometry_fields) for record in geometry_records
            ]

        records = [{
            **non_geometry_records[index],
            **geometry_records[index]
        } for index in range(len(non_geometry_records))]
        return [{field: record[field]
                 for field in self.fields} for record in records]
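This variant mirrors the PostGIS version above but targets SpatiaLite: ? placeholders instead of %s, unprefixed function names (GeomFromText, Transform, Intersects), and geometries fetched separately through asbinary since sqlite3 has no native geometry type. A sketch of the kind of connection it assumes (database path illustrative; mod_spatialite must be installed and on the library search path):

import sqlite3

connection = sqlite3.connect('records.sqlite')
connection.enable_load_extension(True)
connection.load_extension('mod_spatialite')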
Example 8
def get(
    datasets: dict = GISCO_DATASETS,
    crs: CRS = CRS.from_epsg(3035)
) -> gpd.GeoDataFrame:
    """
    Retrieve NUTS, LAU and countries from GISCO API and make a single, consistent GDF.

    Parameters
    ----------
    datasets : dict
        dict with API URLs.
    crs : pyproj.crs.CRS, optional
        Spatial Reference System. The default is EPSG:3035.

    Returns
    -------
    admin_units : GeoDataFrame
        Table with all administrative units.

    """
    source_crs_code = crs.to_epsg()
    logging.info("Downloading countries...")
    countries = gpd.read_file(datasets["countries"].format(source_crs_code),
                              crs=crs.to_string())
    logging.info("Downloading NUTS...")
    nuts = gpd.read_file(datasets["nuts"].format(source_crs_code),
                         crs=crs.to_string())
    logging.info("Downloading LAU...")
    lau = gpd.read_file(datasets["lau"].format(source_crs_code),
                        crs=crs.to_string())
    logging.info("Done.")

    # Convert to lower case
    countries.columns = countries.columns.str.lower()
    nuts.columns = nuts.columns.str.lower()
    lau.columns = lau.columns.str.lower()

    # Create consistent columns across ds
    lau = lau.rename({"lau_name": "name"}, axis=1)
    lau["levl_code"] = 4

    nuts = nuts.rename({"name_latn": "name"}, axis=1)

    countries = countries.rename({"cntr_name": "name"}, axis=1)
    countries = countries.rename({"cntr_id": "cntr_code"}, axis=1)
    countries["levl_code"] = 0

    # EU+ countries are included both in NUTS (level 0) and in "countries",
    # so discard the NUTS level 0 duplicates
    nuts_noEU = nuts.loc[~nuts.id.isin(countries.id), :]

    admin_units = pd.concat([countries, nuts_noEU, lau],
                            axis=0,
                            ignore_index=True)

    admin_units = gpd.GeoDataFrame(
        admin_units.loc[:, ["fid", "name", "name_engl", "cntr_code",
                            "levl_code", "geometry"]]
    )

    # New level codes
    admin_units.levl_code = admin_units.levl_code.replace({
        0: "country",
        1: "NUTS1",
        2: "NUTS2",
        3: "NUTS3",
        4: "LAU"
    })

    # Convert to ISO 3166-1 alpha-2
    transl = {"UK": "GB", "EL": "GR"}
    admin_units["fid"] = admin_units["fid"].replace(transl)
    admin_units["cntr_code"] = admin_units["cntr_code"].replace(transl)

    admin_units.crs = crs.to_string()
    return admin_units
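A minimal usage sketch (requires network access to the GISCO API; the level filter is just an example):

admin_units = get()  # countries, NUTS and LAU in EPSG:3035
lau_only = admin_units[admin_units.levl_code == "LAU"]
print(lau_only[["fid", "name", "cntr_code"]].head())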
Example 9
def prepareRaster(
    df: pd.DataFrame,
    crs: CRS = CRS.from_epsg(3035),
    variable: str = "",
    delete_orig: bool = False,
):
    """
    Convert original raster or NetCDF into EnerMaps rasters (single band, GeoTiff, EPSG:3035).

    Parameters
    ----------
    df : pd.DataFrame
        Results of API extraction.
    crs : pyproj.crs.CRS, optional
        Destination coordinate reference system. The default is EPSG:3035.
    variable : str, optional
        Variable of NetCDF.
    delete_orig : bool, optional
        Set to True to delete the original downloaded file (e.g. NetCDF).

    Returns
    -------
    df : DataFrame
        Results with schema for EnerMaps data table

    """
    dicts = []
    for i, row in df.iterrows():
        filename = row["value"]
        if filename.startswith("http"):
            filename = "/vsicurl/" + filename
        if filename.endswith(".nc"):  # NetCDF input
            src_ds = gdal.Open("NETCDF:{0}:{1}".format(filename, variable))
        else:
            src_ds = gdal.Open(filename)

        # Override function parameter
        if "variable" in row.index:
            variable = row["variable"]

        if "crs" in df.columns:
            source_wkt = osr.SpatialReference()
            source_wkt.ImportFromEPSG(row.crs.to_epsg())
            source_wkt = source_wkt.ExportToPrettyWkt()
            source_crs = CRS.from_wkt(source_wkt)
        else:
            prj = src_ds.GetProjection()
            srs = osr.SpatialReference(wkt=prj)
            source_crs = CRS.from_epsg(srs.GetAttrValue("authority", 1))

        dest_wkt = osr.SpatialReference()
        dest_wkt.ImportFromEPSG(crs.to_epsg())
        dest_wkt = dest_wkt.ExportToPrettyWkt()

        for b in range(src_ds.RasterCount):
            my_dict = {}
            b += 1
            dest_filename = Path(filename).stem
            dest_filename += "_band" + str(b)

            # Translating to make sure that the raster settings are consistent for each band
            logging.info("Translating band {}".format(b))
            os.system(
                "gdal_translate {filename} {dest_filename}.tif -b {b} -of GTIFF --config GDAL_PAM_ENABLED NO -co COMPRESS=DEFLATE -co BIGTIFF=YES".format(
                    filename=filename, dest_filename=dest_filename, b=b
                )
            )

            # Reprojecting if needed
            if source_crs.to_epsg() != crs.to_epsg():
                logging.info(
                    "Warping from {} to {}".format(source_crs.to_epsg(), crs.to_epsg())
                )
                intermediate_filename = dest_filename + ".tif"  # from previous step
                dest_filename += "_{}".format(crs.to_epsg())
                os.system(
                    "gdalwarp {intermediate_filename} {dest_filename}.tif -of GTIFF -s_srs {sourceSRS} -t_srs {outputSRS} --config GDAL_PAM_ENABLED NO -co COMPRESS=DEFLATE -co BIGTIFF=YES".format(
                        intermediate_filename=intermediate_filename,
                        dest_filename=dest_filename,
                        outputSRS=crs.to_string(),
                        sourceSRS=source_crs.to_string(),
                    )
                )
                os.remove(intermediate_filename)

            dest_filename += ".tif"
            logging.info(dest_filename)
            my_dict["start_at"] = row["start_at"] + pd.Timedelta(hours=row["dt"]) * (
                b - 1
            )
            my_dict["z"] = row["z"]
            my_dict["dt"] = row["dt"]
            my_dict["unit"] = row["unit"]
            my_dict["variable"] = variable
            my_dict["fid"] = dest_filename
            my_dict["israster"] = True
            dicts.append(my_dict)
    data = pd.DataFrame(
        dicts,
        columns=[
            "start_at",
            "fields",
            "variable",
            "value",
            "ds_id",
            "fid",
            "dt",
            "z",
            "unit",
            "israster",
        ],
    )
    if delete_orig and not filename.startswith("/vsicurl/"):
        # Remove the local original download; remote /vsicurl/ paths are skipped
        os.remove(filename)
    return data
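The shell-outs to gdal_translate and gdalwarp could equally use GDAL's Python bindings, which avoids command-string quoting issues. A hedged sketch of the equivalent calls, reusing the snippet's variables (filename, b, dest_filename, intermediate_filename, source_crs, crs) and not tested as a drop-in replacement:

from osgeo import gdal

# Per-band translate, mirroring the gdal_translate call above
gdal.Translate(
    dest_filename + ".tif",
    filename,
    bandList=[b],
    format="GTiff",
    creationOptions=["COMPRESS=DEFLATE", "BIGTIFF=YES"],
)

# Reprojection, mirroring the gdalwarp call above
gdal.Warp(
    dest_filename + ".tif",
    intermediate_filename,
    srcSRS=source_crs.to_string(),
    dstSRS=crs.to_string(),
    creationOptions=["COMPRESS=DEFLATE", "BIGTIFF=YES"],
)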