def test_from_wkt_invalid():
    with pytest.raises(CRSError), pytest.warns(UserWarning):
        CRS.from_wkt(CRS(4326).to_proj4())
        arr[arr == no_data] = np.nan
        stack_array.append(arr[0, :, :])
        print("{} has {} nan, shape is {}".format(
            raster, np.count_nonzero(np.isnan(arr)), arr.shape))
    return np.array(stack_array)


outputs_folder = "/home/jake/scripts/land_cover/nan_years"
arr = make_raster_stack_no_clip(outputs_folder)

crs = CRS.from_wkt(
    'PROJCS["Albers_Conical_Equal_Area",GEOGCS["WGS 84",DATUM["WGS_1984",\
SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],\
AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433,\
AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],\
PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["latitude_of_center",23],\
PARAMETER["longitude_of_center",-96],PARAMETER["standard_parallel_1",29.5],\
PARAMETER["standard_parallel_2",45.5],PARAMETER["false_easting",0],\
PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],\
AXIS["Easting",EAST],AXIS["Northing",NORTH]]')

transform = Affine(30.0, 0.0, -833925.0, 0.0, -30.0, 2094465.0)

first_year = 1986
last_year = 2017

for arr_ind, year in enumerate(range(first_year, last_year + 1, 1)):
    print("running {}".format(year))
    output = np.empty((5784, 7585))
    output.fill(-9999)
    for i in range(5784):
        for j in range(7585):
def test_from_wkt():
    wgs84 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs")
    from_wkt = CRS.from_wkt(wgs84.to_wkt())
    assert wgs84.to_wkt() == from_wkt.to_wkt()
def reproject(in_raster, out_raster=None, reference_raster=None, target_projection=None,
              resampling=0, output_format='MEM', quiet=True, compress=True):
    ''' '''
    # Is the output format correct?
    if out_raster is None and output_format != 'MEM':
        raise AttributeError("If output_format is not MEM, out_raster must be defined")

    # If out_raster is specified, default to GTiff output format
    if out_raster is not None and output_format == 'MEM':
        output_format = 'GTiff'

    if out_raster is None:
        out_raster = 'ignored'  # This is necessary as GDAL expects a string no matter what.
    else:
        assert os.path.isdir(os.path.dirname(out_raster)), \
            f'Output folder does not exist: {out_raster}'

    assert reference_raster is None or target_projection is None, \
        'reference_raster and target_projection cannot be applied at the same time.'

    if isinstance(in_raster, gdal.Dataset):  # Input is already a GDAL dataset.
        source_raster = in_raster
    else:
        try:
            source_raster = gdal.Open(in_raster, gdal.GA_ReadOnly)
        except:
            try:
                if isinstance(in_raster, np.ndarray):
                    source_raster = array_to_raster(in_raster, reference_raster=reference_raster)
                else:
                    raise Exception('Unable to transform in_raster.')
            except:
                raise Exception('Unable to read in_raster.')

    # Gather reference information
    if reference_raster is not None:
        if isinstance(reference_raster, gdal.Dataset):  # Reference is already a GDAL dataset.
            target_projection = CRS.from_wkt(reference_raster.GetProjection())
        else:
            try:
                target_projection = CRS.from_wkt(
                    gdal.Open(reference_raster, gdal.GA_ReadOnly).GetProjection())
            except:
                raise Exception('Unable to read reference_raster.')
    else:
        try:
            target_projection = CRS.from_epsg(target_projection)
        except:
            try:
                target_projection = CRS.from_wkt(target_projection)
            except:
                try:
                    if isinstance(target_projection, CRS):
                        target_projection = target_projection
                    else:
                        raise Exception('Unable to transform target_projection')
                except:
                    raise Exception('Unable to read target_projection')

    driver = gdal.GetDriverByName(output_format)
    datatype = source_raster.GetRasterBand(1).DataType

    # If the output is not memory, set compression options.
    creation_options = []
    if compress is True:
        if output_format != 'MEM':
            if datatype_is_float(datatype) is True:
                predictor = 3  # Float predictor
            else:
                predictor = 2  # Integer predictor
            creation_options = [
                'COMPRESS=DEFLATE',
                f'PREDICTOR={predictor}',
                'NUM_THREADS=ALL_CPUS',
                'BIGTIFF=YES',
            ]

    og_projection_osr = osr.SpatialReference()
    og_projection_osr.ImportFromWkt(source_raster.GetProjection())

    dst_projection_osr = osr.SpatialReference()
    dst_projection_osr.ImportFromWkt(target_projection.to_wkt())

    og_transform = source_raster.GetGeoTransform()
    og_x_size = source_raster.RasterXSize
    og_y_size = source_raster.RasterYSize

    coord_transform = osr.CoordinateTransformation(og_projection_osr, dst_projection_osr)

    o_ulx, xres, xskew, o_uly, yskew, yres = og_transform
    o_lrx = o_ulx + (og_x_size * xres)
    o_lry = o_uly + (og_y_size * yres)

    og_col = (o_lrx - o_ulx)
    og_row = (o_uly - o_lry)

    ulx, uly, ulz = coord_transform.TransformPoint(float(o_ulx), float(o_uly))
    urx, ury, urz = coord_transform.TransformPoint(float(o_lrx), float(o_uly))
    lrx, lry, lrz = coord_transform.TransformPoint(float(o_lrx), float(o_lry))
    llx, lly, llz = coord_transform.TransformPoint(float(o_ulx), float(o_lry))

    dst_col = max(lrx, urx) - min(llx, ulx)
    dst_row = max(ury, uly) - min(lry, lly)

    cols = int((dst_col / og_col) * og_x_size)
    rows = int((dst_row / og_row) * og_y_size)

    dst_pixel_width = dst_col / cols
    dst_pixel_height = dst_row / rows

    dst_transform = (min(ulx, llx), dst_pixel_width, -0.0,
                     max(uly, ury), 0.0, -dst_pixel_height)

    destination_dataframe = driver.Create(out_raster, cols, rows, 1, datatype, creation_options)
    destination_dataframe.SetProjection(target_projection.to_wkt())
    destination_dataframe.SetGeoTransform(dst_transform)

    # gdal.Warp(
    #     destination_dataframe,
    #     source_raster,
    #     format=output_format,
    #     multithread=True,
    #     srcSRS=og_projection_osr.ExportToWkt(),
    #     dstSRS=target_projection.to_wkt(),
    # )

    gdal.ReprojectImage(source_raster, destination_dataframe,
                        og_projection_osr.ExportToWkt(), target_projection.to_wkt(),
                        resampling)

    destination_dataframe.FlushCache()

    if output_format == 'MEM':
        return destination_dataframe
    else:
        destination_dataframe = None
        return out_raster
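# --- Usage sketch (not from the original source) ---
# Hypothetical call with placeholder paths; it assumes the same `gdal` import the
# function relies on. target_projection may be an EPSG code, a WKT string, or a CRS.
reprojected_path = reproject(
    "./input_scene.tif",              # placeholder input raster
    out_raster="./output_scene.tif",  # placeholder output; switches output_format to GTiff
    target_projection=3035,           # illustrative EPSG code
    resampling=gdal.GRA_Bilinear,     # GDAL resampling constant (default 0 = nearest neighbour)
)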
def reproject_shapefile(shapefile_path, model_raster, out_path) -> str:
    """Reprojects a shapefile to match the projection of a raster (if necessary).

    Parameters
    ----------
    shapefile_path : str
        Path to shapefile that will be reprojected
    model_raster : str
        Path to model raster from which projection information will be extracted
    out_path : str
        Location on disk where output is to be written

    Returns
    -------
    str
        Path to new reprojected shapefile
    """
    # get raster projection as wkt
    with rasterio.open(model_raster, 'r') as img:
        raster_wkt = img.profile['crs'].to_wkt()

    # get shapefile projection as wkt
    with open(shapefile_path.replace(".shp", ".prj")) as rf:
        shapefile_wkt = rf.read()

    # output path for the (possibly reprojected) shapefile
    out_shapefile_path = out_path  # os.path.join(temp_dir, os.path.basename(shapefile_path))

    # if it's a match, nothing needs to be done
    if raster_wkt == shapefile_wkt:
        log.warning("CRS already match")
        # get input directory and filename
        in_dir = os.path.dirname(shapefile_path)
        in_name = os.path.splitext(os.path.basename(shapefile_path))[0]
        # list all elements of shapefile
        all_shape_files = glob.glob(os.path.join(in_dir, f"{in_name}.*"))
        # get output directory and filenames
        out_dir = os.path.dirname(out_path)
        out_name = os.path.splitext(os.path.basename(out_path))[0]
        for f in all_shape_files:
            name, ext = os.path.splitext(os.path.basename(f))
            out_f = os.path.join(out_dir, f"{out_name}{ext}")
            with open(f, 'rb') as rf:
                with open(out_f, 'wb') as wf:
                    shutil.copyfileobj(rf, wf)
    else:
        # get CRS objects
        raster_crs = CRS.from_wkt(raster_wkt)
        shapefile_crs = CRS.from_wkt(shapefile_wkt)
        # transformer = Transformer.from_crs(raster_crs, shapefile_crs)

        # convert geometry and crs
        data = gpd.read_file(shapefile_path)
        data_proj = data.copy()
        data_proj['geometry'] = data_proj['geometry'].to_crs(raster_crs)
        data_proj.crs = raster_crs

        # save output
        data_proj.to_file(out_shapefile_path)

    return out_shapefile_path
def test_from_wkt_invalid():
    with pytest.raises(CRSError):
        CRS.from_wkt(CRS(4326).to_proj4())
def __init__(self):
    self.__projection_for_Baden_Wuerttemberg = CRS.from_wkt(
        # projection as used by Landesamt für Geoinformation und Landentwicklung in Baden-Württemberg
        """PROJCS["ETRS89 / UTM zone 32N",GEOGCS["ETRS89",DATUM["European Terrestrial Reference System 1989",SPHEROID["GRS 1980",6378137.0,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0.0,0.0,0.0,0.0,0.0,0.0,0.0],AUTHORITY["EPSG","6258"]],PRIMEM["Greenwich",0.0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.017453292519943295],AXIS["Geodetic longitude",EAST],AXIS["Geodetic latitude",NORTH],AUTHORITY["EPSG","4258"]],PROJECTION["Transverse Mercator",AUTHORITY["EPSG","9807"]],PARAMETER["central_meridian",9.0],PARAMETER["latitude_of_origin",0.0],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000.0],PARAMETER["false_northing",0.0],UNIT["m",1.0],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","25832"]]"""
    )
    self.__transformer = Transformer.from_crs(
        self.__projection_for_Baden_Wuerttemberg, "EPSG:4326")
def is_geographic(self):
    return CRS.from_wkt(self.wkt).is_geographic
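# Illustrative check (assumed, not from the original class): a geographic CRS
# such as EPSG:4326, round-tripped through WKT, reports is_geographic as True.
from pyproj import CRS
print(CRS.from_wkt(CRS.from_epsg(4326).to_wkt()).is_geographic)  # True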
# Show the EEA geoTiff
EEAtiff = 'data/eea_r_3035_1_km_forest-assemblage-sps_2006.tif'
EEArs = rs.open(EEAtiff)
show(EEArs)

# Print specifications of the geoTiff
print('No. of bands: ' + str(EEArs.count))
print('Image resolution: ' + str(EEArs.height) + ' x ' + str(EEArs.width))
print('Coordinate Reference System (CRS): ' + str(EEArs.crs))

# Load EEA csv
EEAcsv = 'data/eea_r_3035_1_km_forest-assemblage-sps_2006.csv'

# EEA data into pandas
EEA = pd.read_csv(EEAcsv)

# Dataset Coordinate Reference System (inside 'EEArs.crs')
crs = CRS.from_wkt('PROJCS["ETRS89-extended / LAEA Europe",GEOGCS["ETRS89",DATUM["European_Terrestrial_Reference_System_1989",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6258"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]]],PROJECTION["Lambert_Azimuthal_Equal_Area"],PARAMETER["latitude_of_center",52],PARAMETER["longitude_of_center",10],PARAMETER["false_easting",4321000],PARAMETER["false_northing",3210000],UNIT["metre",1],AXIS["Easting",EAST],AXIS["Northing",NORTH]]')

# WGS84 (lat lon) Reference System
# wgs84 = CRS("EPSG:4326")
# epsg3035 = CRS("EPSG:3035")

# Transform the coordinates
# new_transformer = Transformer.from_crs(epsg3035, wgs84)
# transformer = Transformer.from_crs(crs, wgs84)
# transformer_from_wgs84 = Transformer.from_crs(wgs84, epsg3035)

# New coordinates
# x3, y3 = transformer.transform(900500, 900500)

# Define a projection from the GeoTiff reference system
p = Proj(crs)

EEA_france = EEA.loc[EEA['x'] >= p(3.5, 44.3)[0]].loc[EEA['x'] <= p(4.6, 45)[0]].loc[
    EEA['y'] >= p(3.5, 44.3)[1]].loc[EEA['y'] <= p(4.6, 45)[1]]
def crs_sniffer(
    *args: Union[str, Path, Sequence[Union[str, Path]]]
) -> Union[List[Union[str, int]], str, int]:
    """Return the list of CRS found in files.

    Parameters
    ----------
    args : Union[str, Path, Sequence[Union[str, Path]]]
        Path(s) to the file(s) to examine.

    Returns
    -------
    Union[List[Union[str, int]], str, int]
        Returns either a list of CRSes or a single CRS definition,
        depending on the number of instances found.
    """
    crs_list = list()
    vectors = (".gml", ".shp", ".geojson", ".gpkg", ".json")
    rasters = (".tif", ".tiff")
    all_files = vectors + rasters

    for file in args:
        found_crs = False
        suffix = Path(file).suffix.lower()
        try:
            if suffix == ".zip":
                file = archive_sniffer(file, extensions=all_files)[0]
                suffix = Path(file).suffix.lower()

            if suffix in vectors:
                if suffix == ".gpkg":
                    if len(fiona.listlayers(file)) > 1:
                        raise NotImplementedError
                with fiona.open(file, "r") as src:
                    found_crs = CRS.from_wkt(src.crs_wkt).to_epsg()
            elif suffix in rasters:
                with rasterio.open(file, "r") as src:
                    found_crs = CRS.from_user_input(src.crs).to_epsg()
            else:
                raise FileNotFoundError("Invalid filename suffix")
        except FileNotFoundError as e:
            msg = f"{e}: Unable to open file {args}"
            LOGGER.warning(msg)
            raise Exception(msg)
        except NotImplementedError as e:
            msg = f"{e}: Multilayer GeoPackages are currently unsupported"
            LOGGER.error(msg)
            raise Exception(msg)
        except RuntimeError:
            pass

        crs_list.append(found_crs)

    if not crs_list:
        msg = f"No CRS definitions found in {args}."
        raise FileNotFoundError(msg)

    if len(crs_list) == 1:
        if not crs_list[0]:
            msg = f"No CRS definitions found in {args}. Assuming {WGS84}."
            LOGGER.warning(msg)
            warnings.warn(msg, UserWarning)
            return WGS84
        return crs_list[0]
    return crs_list
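# --- Hypothetical invocation (file names are placeholders) ---
# One file: returns a single EPSG code, or the WGS84 fallback if no CRS is found.
epsg = crs_sniffer("watershed_boundaries.shp")
# Several files: returns a list with one CRS entry per input.
codes = crs_sniffer("watershed_boundaries.shp", "dem.tif")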
def prepareRaster(
    df: pd.DataFrame,
    crs: CRS = CRS.from_epsg(3035),
    variable: str = "",
    delete_orig: bool = False,
):
    """
    Convert original raster or NetCDF into EnerMaps rasters
    (single band, GeoTiff, EPSG:3035).

    Parameters
    ----------
    df : DataFrame
        Results of API extraction.
    crs : pyproj.crs.CRS, optional
        Coordinate reference system.
    variable : str, optional
        Variable of NetCDF.
    delete_orig : bool, optional
        Set to True to delete original downloaded file (e.g. NetCDF).

    Returns
    -------
    df : DataFrame
        Results with schema for EnerMaps data table
    """
    dicts = []
    for i, row in df.iterrows():
        filename = row["value"]
        if filename.startswith("http"):
            filename = "/vsicurl/" + filename
        if filename[-2:] == "nc":
            src_ds = gdal.Open("NETCDF:{0}:{1}".format(filename, variable))
        else:
            src_ds = gdal.Open(filename)

        # Override function parameter
        if "variable" in row.index:
            variable = row["variable"]

        if "crs" in df.columns:
            source_wkt = osr.SpatialReference()
            source_wkt.ImportFromEPSG(row.crs.to_epsg())
            source_wkt = source_wkt.ExportToPrettyWkt()
            source_crs = CRS.from_wkt(source_wkt)
        else:
            prj = src_ds.GetProjection()
            srs = osr.SpatialReference(wkt=prj)
            source_crs = CRS.from_epsg(srs.GetAttrValue("authority", 1))

        dest_wkt = osr.SpatialReference()
        dest_wkt.ImportFromEPSG(crs.to_epsg())
        dest_wkt = dest_wkt.ExportToPrettyWkt()

        for b in range(src_ds.RasterCount):
            my_dict = {}
            b += 1
            dest_filename = Path(filename).stem
            dest_filename += "_band" + str(b)

            # Translating to make sure that the raster settings are consistent for each band
            logging.info("Translating band {}".format(b))
            os.system(
                "gdal_translate {filename} {dest_filename}.tif -b {b} -of GTIFF --config GDAL_PAM_ENABLED NO -co COMPRESS=DEFLATE -co BIGTIFF=YES".format(
                    filename=filename, dest_filename=dest_filename, b=b
                )
            )

            # Reprojecting if needed
            if source_crs.to_epsg() != crs.to_epsg():
                logging.info(
                    "Warping from {} to {}".format(source_crs.to_epsg(), crs.to_epsg())
                )
                intermediate_filename = dest_filename + ".tif"  # from previous step
                dest_filename += "_{}".format(crs.to_epsg())
                os.system(
                    "gdalwarp {intermediate_filename} {dest_filename}.tif -of GTIFF -s_srs {sourceSRS} -t_srs {outputSRS} --config GDAL_PAM_ENABLED NO -co COMPRESS=DEFLATE -co BIGTIFF=YES".format(
                        intermediate_filename=intermediate_filename,
                        dest_filename=dest_filename,
                        outputSRS=crs.to_string(),
                        sourceSRS=source_crs.to_string(),
                    )
                )
                os.remove(intermediate_filename)

            dest_filename += ".tif"
            logging.info(dest_filename)

            my_dict["start_at"] = row["start_at"] + pd.Timedelta(hours=row["dt"]) * (b - 1)
            my_dict["z"] = row["z"]
            my_dict["dt"] = row["dt"]
            my_dict["unit"] = row["unit"]
            my_dict["variable"] = variable
            my_dict["fid"] = dest_filename
            my_dict["israster"] = True
            dicts.append(my_dict)

    data = pd.DataFrame(
        dicts,
        columns=[
            "start_at",
            "fields",
            "variable",
            "value",
            "ds_id",
            "fid",
            "dt",
            "z",
            "unit",
            "israster",
        ],
    )

    if delete_orig:
        os.remove(filename)

    return data
import arcpy
from pyproj import CRS
from pyproj import Transformer
import os

# environment settings
env_path = r"C:\Users\sozeren.mapit\Documents\ArcGIS\Projects\KoordinatSistemiTest\cbsarcgisew.sde"
arcpy.env.workspace = env_path

# transformation settings
src_wkt = """PROJCS["ED_1950_Turkey_12",GEOGCS["GCS_European_1950",DATUM["D_European_1950",SPHEROID["International_1924",6378388.0,297.0]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",12500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",36.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0]]"""
target_wkt = """PROJCS["ED_1950_TM36",GEOGCS["GCS_European_1950",DATUM["D_European_1950",SPHEROID["International_1924",6378388.0,297.0]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Gauss_Kruger"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",36.0],PARAMETER["Scale_Factor",1.0],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0],AUTHORITY["EPSG",2322]]"""

# initial variables
src_crs = CRS.from_wkt(src_wkt)
target_crs = CRS.from_wkt(target_wkt)
transformer = Transformer.from_crs(src_crs, target_crs)

try:
    with arcpy.da.UpdateCursor("Numarataj", ['OID@', 'SHAPE@XY']) as ucursor:
        for row in ucursor:
            old_x, old_y = row[1][0], row[1][1]
            new_x, new_y = transformer.transform(old_x, old_y)
            row[1] = new_x, new_y
            ucursor.updateRow(row)

    arcpy.DefineProjection_management("Numarataj", target_wkt)