def test_from_epsg_string():
    """from_string() should parse an 'epsg:NNNN' string and round-trip the code."""
    proj = CRS.from_string("epsg:4326")
    assert proj.to_epsg() == 4326

    # A non-numeric EPSG code must be rejected.
    # Fixed: the call was wrapped in a redundant `assert` — an expression that
    # is expected to raise never returns, so the assert was dead code.
    with pytest.raises(ValueError):
        CRS.from_string("epsg:xyz")
def test_crs_OSR_equivalence():
    """CRS objects built from equivalent proj4 strings / init dicts compare equal."""
    crs1 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs")
    crs2 = CRS.from_string("+proj=latlong +datum=WGS84 +no_defs")
    crs3 = CRS({"init": "EPSG:4326"})
    assert crs1 == crs2
    # these are not equivalent in proj.4 now as one uses degrees and the other radians
    assert crs1 == crs3
def test_from_proj4_json():
    """from_string() accepts a JSON object of proj4 parameters."""
    json_str = '{"proj": "longlat", "ellps": "WGS84", "datum": "WGS84"}'
    proj = CRS.from_string(json_str)
    assert proj.to_proj4(4) == "+proj=longlat +datum=WGS84 +no_defs +type=crs"
    assert proj.to_proj4(5) == "+proj=longlat +datum=WGS84 +no_defs +type=crs"

    # Malformed JSON must raise CRSError.
    # Fixed: the call was wrapped in a redundant `assert` — an expression that
    # is expected to raise never returns, so the assert was dead code.
    with pytest.raises(CRSError):
        CRS.from_string("{foo: bar}")
def test_from_string():
    """Plain proj4 strings and +init=epsg strings both parse via from_string()."""
    crs_wgs84 = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
    assert crs_wgs84.to_proj4() == "+proj=longlat +datum=WGS84 +no_defs +type=crs"

    # Make sure this doesn't get handled using the from_epsg() even though
    # 'epsg' is in the string
    crs_epsg_init = CRS.from_string("+init=epsg:26911 +units=m +no_defs=True")
    expected = "+proj=utm +zone=11 +datum=NAD83 +units=m +no_defs +type=crs"
    assert crs_epsg_init.to_proj4() == expected
def test_is_projected():
    """is_projected distinguishes projected CRS from geographic ones."""
    assert CRS({"init": "EPSG:3857"}).is_projected is True

    lcc = CRS.from_string(
        "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc +x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0"
    )
    assert CRS.from_user_input(lcc).is_projected is True

    wgs84 = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
    assert CRS.from_user_input(wgs84).is_projected is False
def test_is_geographic():
    """is_geographic is True for lon/lat CRS and False for projected ones."""
    assert CRS({"init": "EPSG:4326"}).is_geographic is True
    assert CRS({"init": "EPSG:3857"}).is_geographic is False

    # Every plain lon/lat definition should report geographic.
    for proj4 in (
        "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs",
        "+proj=longlat +ellps=clrk66 +datum=NAD27 +no_defs",
    ):
        assert CRS.from_string(proj4).is_geographic is True

    lcc = CRS.from_string(
        "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc +x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0"
    )
    assert lcc.is_geographic is False
def test_bare_parameters(): """ Make sure that bare parameters (e.g., no_defs) are handled properly, even if they come in with key=True. This covers interaction with pyproj, which makes presents bare parameters as key=<bool>.""" # Example produced by pyproj proj = CRS.from_string( "+proj=lcc +lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0" ) assert "+no_defs" in proj.to_proj4(4) # TODO: THIS DOES NOT WORK proj = CRS.from_string( "+lon_0=-95 +ellps=GRS80 +proj=lcc +y_0=0 +no_defs=False +x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0" )
def test_from_esri_wkt():
    """An ESRI-flavored WKT string parses identically via from_string() and CRS()."""
    # Albers Equal Area over CONUS with a vertical CS appended (NAVD88).
    projection_string = (
        'PROJCS["USA_Contiguous_Albers_Equal_Area_Conic_USGS_version",'
        'GEOGCS["GCS_North_American_1983",DATUM["D_North_American_1983",'
        'SPHEROID["GRS_1980",6378137.0,298.257222101]],'
        'PRIMEM["Greenwich",0.0],'
        'UNIT["Degree",0.0174532925199433]],'
        'PROJECTION["Albers"],'
        'PARAMETER["false_easting",0.0],'
        'PARAMETER["false_northing",0.0],'
        'PARAMETER["central_meridian",-96.0],'
        'PARAMETER["standard_parallel_1",29.5],'
        'PARAMETER["standard_parallel_2",45.5],'
        'PARAMETER["latitude_of_origin",23.0],'
        'UNIT["Meter",1.0],'
        'VERTCS["NAVD_1988",'
        'VDATUM["North_American_Vertical_Datum_1988"],'
        'PARAMETER["Vertical_Shift",0.0],'
        'PARAMETER["Direction",1.0],UNIT["Centimeter",0.01]]]'
    )

    proj_crs_str = CRS.from_string(projection_string)
    proj_crs_wkt = CRS(projection_string)
    # Both construction paths must yield the same proj4 expansion.
    assert proj_crs_str.to_proj4() == proj_crs_wkt.to_proj4()
    assert proj_crs_str.to_proj4(4) == (
        "+proj=aea +lat_0=23 +lon_0=-96 +lat_1=29.5 "
        "+lat_2=45.5 +x_0=0 +y_0=0 +datum=NAD83 +units=m +no_defs +type=crs"
    )
def test_is_same_crs():
    """Equality between CRS objects reflects actual projection equivalence."""
    epsg4326 = CRS({"init": "EPSG:4326"})
    epsg3857 = CRS({"init": "EPSG:3857"})

    assert epsg4326 == epsg4326
    assert epsg4326 != epsg3857

    wgs84 = CRS.from_string("+proj=longlat +ellps=WGS84 +datum=WGS84")
    assert epsg4326 == wgs84

    # Make sure that same projection with different parameter are not equal
    lcc_a = CRS.from_string(
        "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc +x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0"
    )
    lcc_b = CRS.from_string(
        "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc +x_0=0 +units=m +lat_2=77 +lat_1=45 +lat_0=0"
    )
    assert lcc_a != lcc_b
def test_empty_json():
    """Empty JSON-ish inputs must all raise CRSError."""
    for text in ("{}", "[]", ""):
        with pytest.raises(CRSError):
            CRS.from_string(text)
def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=None,
                  input_dims=('y', 'x')):
    """Get common data objects used in testing.

    Args:
        input_shape: shape of the gridded input DataArray.
        output_shape: shape of the target area definition.
        output_proj: optional proj4 string overriding the default LCC target.
        input_dims: dimension names for the input DataArray; may include 'bands'.

    Returns: tuple with the following elements
        input_data_on_area: DataArray with dimensions as if it is a gridded dataset.
        input_area_def: AreaDefinition of the above DataArray
        input_data_on_swath: DataArray with dimensions as if it is a swath.
        input_swath: SwathDefinition of the above DataArray
        target_area_def: AreaDefinition to be used as a target for resampling

    """
    # Imports kept local so importing this module does not require the heavy deps.
    from xarray import DataArray
    import dask.array as da
    from pyresample.geometry import AreaDefinition, SwathDefinition
    from pyresample.utils import proj4_str_to_dict

    # Gridded input; chunks=85 deliberately does not divide the default shape
    # evenly (exercises ragged dask chunking).
    ds1 = DataArray(da.zeros(input_shape, chunks=85),
                    dims=input_dims,
                    attrs={'name': 'test_data_name', 'test': 'test'})
    if input_dims and 'y' in input_dims:
        ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85))
    if input_dims and 'x' in input_dims:
        ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85))
    if input_dims and 'bands' in input_dims:
        ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']]))

    # Geostationary source area matching the gridded input.
    input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 '
                      '+b=6356752.31414 +sweep=x +units=m +no_defs')
    source = AreaDefinition(
        'test_target', 'test_target', 'test_target',
        proj4_str_to_dict(input_proj_str),
        input_shape[1],  # width
        input_shape[0],  # height
        (-1000., -1500., 1000., 1500.))
    ds1.attrs['area'] = source
    # CRS may be None when pyproj is unavailable (guarded import elsewhere in
    # this module — presumably; confirm against the file header).
    if CRS is not None:
        crs = CRS.from_string(input_proj_str)
        ds1 = ds1.assign_coords(crs=crs)

    # Swath variant of the same data: random lon/lat arrays of matching shape.
    ds2 = ds1.copy()
    input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims
                             if dim in ['y', 'x'])
    geo_dims = ('y', 'x') if input_dims else None
    lons = da.random.random(input_area_shape, chunks=50)
    lats = da.random.random(input_area_shape, chunks=50)
    swath_def = SwathDefinition(DataArray(lons, dims=geo_dims),
                                DataArray(lats, dims=geo_dims))
    ds2.attrs['area'] = swath_def
    if CRS is not None:
        crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84')
        ds2 = ds2.assign_coords(crs=crs)

    # set up target definition
    output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                       '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs')
    output_proj_str = output_proj or output_proj_str
    target = AreaDefinition(
        'test_target', 'test_target', 'test_target',
        proj4_str_to_dict(output_proj_str),
        output_shape[1],  # width
        output_shape[0],  # height
        (-1000., -1500., 1000., 1500.),
    )
    return ds1, source, ds2, swath_def, target
def query(self, range_subset=[], subsets={}, bbox=[], datetime_=None,
          format_='json', **kwargs):
    """
    Extract data from collection collection

    :param range_subset: list of bands
    :param subsets: dict of subset names with lists of ranges
    :param bbox: bounding box [minx,miny,maxx,maxy]
    :param datetime_: temporal (datestamp or extent)
    :param format_: data format of output

    :returns: coverage data as dict of CoverageJSON or native format
    """
    # NOTE(review): range_subset=[] / subsets={} / bbox=[] are mutable default
    # arguments; they are only read here, never mutated, so this is safe today,
    # but consider switching to None sentinels.
    bands = range_subset
    LOGGER.debug('Bands: {}, subsets: {}'.format(bands, subsets))

    args = {'indexes': None}
    shapes = []

    # Fast path: nothing requested and a native format wanted — return raw file.
    if all([not bands, not subsets, not bbox, format_ != 'json']):
        LOGGER.debug('No parameters specified, returning native data')
        return read_data(self.data)

    # bbox and axis subsetting are mutually exclusive request styles.
    if all([self._coverage_properties['x_axis_label'] in subsets,
            self._coverage_properties['y_axis_label'] in subsets,
            len(bbox) > 0]):
        msg = 'bbox and subsetting by coordinates are exclusive'
        LOGGER.warning(msg)
        raise ProviderQueryError(msg)

    if len(bbox) > 0:
        minx, miny, maxx, maxy = bbox

        # Incoming bbox coordinates are assumed to be WGS84 lon/lat.
        crs_src = CRS.from_epsg(4326)

        # Destination CRS: explicit option wins over the dataset's own CRS.
        if 'crs' in self.options:
            crs_dest = CRS.from_string(self.options['crs'])
        else:
            crs_dest = self._data.crs

        if crs_src == crs_dest:
            LOGGER.debug('source bbox CRS and data CRS are the same')
            shapes = [{
                'type': 'Polygon',
                'coordinates': [[
                    [minx, miny],
                    [minx, maxy],
                    [maxx, maxy],
                    [maxx, miny],
                    [minx, miny],
                ]]
            }]
        else:
            LOGGER.debug('source bbox CRS and data CRS are different')
            LOGGER.debug('reprojecting bbox into native coordinates')

            # always_xy=True keeps lon/lat axis order regardless of CRS axis
            # conventions.
            t = Transformer.from_crs(crs_src, crs_dest, always_xy=True)
            minx2, miny2 = t.transform(minx, miny)
            maxx2, maxy2 = t.transform(maxx, maxy)

            LOGGER.debug('Source coordinates: {}'.format(
                [minx, miny, maxx, maxy]))
            LOGGER.debug('Destination coordinates: {}'.format(
                [minx2, miny2, maxx2, maxy2]))

            shapes = [{
                'type': 'Polygon',
                'coordinates': [[
                    [minx2, miny2],
                    [minx2, maxy2],
                    [maxx2, maxy2],
                    [maxx2, miny2],
                    [minx2, miny2],
                ]]
            }]

    elif (self._coverage_properties['x_axis_label'] in subsets and
            self._coverage_properties['y_axis_label'] in subsets):
        LOGGER.debug('Creating spatial subset')

        x = self._coverage_properties['x_axis_label']
        y = self._coverage_properties['y_axis_label']

        # Build the clip polygon from the requested axis ranges
        # (assumed [min, max] per axis — TODO confirm against API docs).
        shapes = [{
            'type': 'Polygon',
            'coordinates': [[
                [subsets[x][0], subsets[y][0]],
                [subsets[x][0], subsets[y][1]],
                [subsets[x][1], subsets[y][1]],
                [subsets[x][1], subsets[y][0]],
                [subsets[x][0], subsets[y][0]]
            ]]
        }]

    if bands:
        LOGGER.debug('Selecting bands')
        args['indexes'] = list(map(int, bands))

    with rasterio.open(self.data) as _data:
        LOGGER.debug('Creating output coverage metadata')
        out_meta = _data.meta

        if self.options is not None:
            LOGGER.debug('Adding dataset options')
            for key, value in self.options.items():
                out_meta[key] = value

        if shapes:  # spatial subset
            try:
                LOGGER.debug('Clipping data with bbox')
                out_image, out_transform = rasterio.mask.mask(
                    _data,
                    filled=False,
                    shapes=shapes,
                    crop=True,
                    indexes=args['indexes'])
            except ValueError as err:
                LOGGER.error(err)
                raise ProviderQueryError(err)

            out_meta.update({'driver': self.native_format,
                             'height': out_image.shape[1],
                             'width': out_image.shape[2],
                             'transform': out_transform})
        else:  # no spatial subset
            LOGGER.debug('Creating data in memory with band selection')
            out_image = _data.read(indexes=args['indexes'])

        # Record the effective bbox in the metadata; falls back to the full
        # dataset bounds when no subset was requested.
        if bbox:
            out_meta['bbox'] = [bbox[0], bbox[1], bbox[2], bbox[3]]
        elif shapes:
            out_meta['bbox'] = [
                subsets[x][0], subsets[y][0],
                subsets[x][1], subsets[y][1]
            ]
        else:
            out_meta['bbox'] = [_data.bounds.left, _data.bounds.bottom,
                                _data.bounds.right, _data.bounds.top]

        out_meta['units'] = _data.units

        LOGGER.debug('Serializing data in memory')
        with MemoryFile() as memfile:
            with memfile.open(**out_meta) as dest:
                dest.write(out_image)

            if format_ == 'json':
                LOGGER.debug('Creating output in CoverageJSON')
                out_meta['bands'] = args['indexes']
                return self.gen_covjson(out_meta, out_image)
            else:  # return data in native format
                LOGGER.debug('Returning data in native format')
                return memfile.read()
def do_work(bucket, path, extension, product_type):
    """Scan an S3 prefix for rasters and index each one into a Datacube.

    For every matching file: read its bounds/CRS with rasterio, reproject the
    corner coordinates to lon/lat, build a dataset document and add (or update)
    it in the Datacube index.

    :param bucket: S3 bucket name
    :param path: key prefix to scan
    :param extension: file suffix filter (e.g. '.tif')
    :param product_type: value stored as the dataset's product_type
    """
    # NOTE(review): `count` and `s3_path_template` are assigned but never used.
    count = 0

    # List the bucket and get all the files with the extension we want
    files = get_matching_s3_keys(bucket, prefix=path, suffix=extension)
    s3_path_template = "s3://{bucket}/{file}"

    for s3_path in files:
        full_path = f"s3://{bucket}/{s3_path}"
        # Generate raster from file path
        raster = rasterio.open(full_path)
        # Extract bounds and crs
        bounds = raster.bounds
        crs_string = raster.crs.to_wkt()

        # hardcode date
        to_date = datetime.datetime(year=2018, month=1, day=1)
        from_date = datetime.datetime(year=2018, month=1, day=1)
        centre_date = datetime.datetime(year=2018, month=1, day=1)

        print(to_date)

        # Handle coordinates
        top = bounds.top
        bottom = bounds.bottom
        right = bounds.right
        left = bounds.left

        # NOTE(review): Proj(init=...) and pyproj.transform are deprecated in
        # pyproj 2+; consider Transformer.from_crs(..., always_xy=True).
        inProj = Proj(CRS.from_string(crs_string))
        outProj = Proj(init='epsg:4326')

        left_ll, bottom_ll = transform(inProj, outProj, left, bottom)
        right_ll, top_ll = transform(inProj, outProj, right, top)

        # unprojected corner coordinates (lon/lat)
        coordinates = {
            'ul': {'lon': left_ll, 'lat': top_ll},
            'ur': {'lon': right_ll, 'lat': top_ll},
            'lr': {'lon': right_ll, 'lat': bottom_ll},
            'll': {'lon': left_ll, 'lat': bottom_ll}
        }

        # projected corner coordinates (native CRS)
        geo_ref_points = {
            'ul': {'x': left, 'y': top},
            'ur': {'x': right, 'y': top},
            'lr': {'x': right, 'y': bottom},
            'll': {'x': left, 'y': bottom}
        }

        # Build a dataset dictionary; the id is deterministic per s3 key so
        # re-runs update rather than duplicate.
        docdict = {
            'id': str(uuid.uuid5(uuid.NAMESPACE_URL, s3_path)),
            'product_type': product_type,
            'creation_dt': centre_date,
            'platform': {'code': 'slim'},
            'instrument': {'name': 'slim'},
            'extent': {
                'from_dt': from_date,
                'to_dt': to_date,
                'center_dt': centre_date,
                'coord': coordinates,
            },
            'format': {'name': 'GeoTiff'},
            'grid_spatial': {
                'projection': {
                    'geo_ref_points': geo_ref_points,
                    'spatial_reference': crs_string,
                }
            },
            'image': {
                'bands': {
                    'band1': {
                        'path': full_path,
                        'layer': 1,
                    }
                }
            },
            'lineage': {'source_datasets': {}}
        }

        # Now index into the Datacube postgres DB
        dc = datacube.Datacube()
        index = dc.index
        resolver = Doc2Dataset(index)
        dataset, err = resolver(docdict, full_path)
        if err is not None:
            logging.error("%s", err)
        else:
            try:
                index.datasets.add(dataset)
            except changes.DocumentMismatchError as e:
                # Document already exists with different content: force-update.
                index.datasets.update(dataset, {tuple(): changes.allow_any})
            except Exception as e:
                err = e
                logging.error("Unhandled exception {}".format(e))
def _assign_data_crs(self, root: etree._Element) -> None:
    """Set ``self.data_crs`` from the srsName of the first polygon in the GML."""
    ns = root.nsmap['gml']
    member = next(root.iter("{%s}featureMember" % ns))
    polygon = next(member.iter("{%s}Polygon" % ns))
    srs_name = polygon.attrib["srsName"]
    self.data_crs = CRS.from_string("+init=" + srs_name)
def build_crs(zone_num: str = None, datum: str = None, epsg: str = None, projected: bool = True):
    """Build a horizontal CRS from an EPSG code, or from a datum (+ zone when projected).

    Parameters
    ----------
    zone_num
        zone and hemisphere concatenated, e.g. '10N'; required when projected
        and no epsg is given
    datum
        one of 'WGS84', 'NAD83', 'NAD83 PA11', 'NAD83 MA11' (case-insensitive)
    epsg
        EPSG code as a string; takes precedence when provided.  A non-integer
        value is treated as a proj string instead.
    projected
        if True (and no epsg), build a projected (zoned) CRS

    Returns
    -------
    tuple
        (pyproj CRS or None, error message — empty string on success)
    """
    # Single lookup table replaces the four copy-pasted datum branches.
    # Maps the user-facing datum name to (epsg_determinator key, error label).
    datum_lookup = {
        'NAD83': ('nad83(2011)', 'NAD83(2011)'),
        'NAD83 PA11': ('nad83(pa11)', 'NAD83 PA11'),
        'NAD83 MA11': ('nad83(ma11)', 'NAD83 MA11'),
        'WGS84': ('wgs84', 'WGS84'),
    }
    horizontal_crs = None
    if epsg:
        try:
            horizontal_crs = CRS.from_epsg(int(epsg))
        except CRSError:
            # if the CRS we generate here has no epsg, when we save it to disk
            # we save the proj string
            horizontal_crs = CRS.from_string(epsg)
    elif not projected:
        datum = datum.upper()
        if datum not in datum_lookup:
            err = 'ERROR: {} not supported (geographic). Only supports WGS84, NAD83, NAD83 PA11, NAD83 MA11'.format(
                datum)
            return horizontal_crs, err
        horizontal_crs = CRS.from_epsg(epsg_determinator(datum_lookup[datum][0]))
    else:
        datum = datum.upper()
        zone = zone_num  # this will be the zone and hemi concatenated, '10N'
        try:
            zone, hemi = int(zone[:-1]), str(zone[-1:])
        except Exception:  # was a bare except; narrowed so SystemExit/KeyboardInterrupt propagate
            err = 'ERROR: found invalid projected zone/hemisphere identifier: {}, expected something like "10N"'.format(
                zone)
            return horizontal_crs, err
        if datum not in datum_lookup:
            err = 'ERROR: {} not supported (projected). Only supports WGS84, NAD83, NAD83 PA11, NAD83 MA11'.format(
                datum)
            return horizontal_crs, err
        det_key, label = datum_lookup[datum]
        try:
            myepsg = epsg_determinator(det_key, zone=zone, hemisphere=hemi)
        except Exception:  # was a bare except
            err = 'ERROR: unable to determine epsg for {}, zone={}, hemisphere={}, out of bounds?'.format(
                label, zone, hemi)
            return horizontal_crs, err
        horizontal_crs = CRS.from_epsg(myepsg)
    return horizontal_crs, ''
def test_crs_OSR_no_equivalence():
    """CRS built on different datums must not compare equal."""
    wgs84 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs")
    nad27 = CRS.from_string("+proj=longlat +datum=NAD27 +no_defs")
    assert wgs84 != nad27
def _update_spref(tree, crs):
    """Update spref with standard CRS info for epsg codes we recognize.

    :param tree: metadata element tree to modify in place
    :param crs: mapping with an 'init' key holding an 'epsg:NNNN' string
    :returns: the (modified) tree
    """
    spref = ''
    init = crs['init']
    if init == 'epsg:4326':  # WGS84
        spref = """
<spref>
  <horizsys>
    <geograph>
      <latres>0.000001</latres>
      <longres>0.000001</longres>
      <geogunit>Decimal degrees</geogunit>
    </geograph>
    <geodetic>
      <horizdn>D_WGS_1984</horizdn>
      <ellips>WGS_1984</ellips>
      <semiaxis>6378137.000000</semiaxis>
      <denflat>298.257224</denflat>
    </geodetic>
  </horizsys>
</spref>"""
    elif init == 'epsg:4269':  # NAD83
        spref = """
<spref>
  <horizsys>
    <geograph>
      <latres>0.000001</latres>
      <longres>0.000001</longres>
      <geogunit>Decimal degrees</geogunit>
    </geograph>
    <geodetic>
      <horizdn>North American Datum of 1983</horizdn>
      <ellips>GRS1980</ellips>
      <semiaxis>6378137.0</semiaxis>
      <denflat>298.257222</denflat>
    </geodetic>
  </horizsys>
</spref>"""
    elif init == 'epsg:2261':  # NAD83 State Plane New York Central feet
        # NOTE(review): ellips says GRS 80 but semiaxis/denflat below are the
        # Clarke 1866 values (6378206 / 294.9786982) — confirm intended datum.
        spref = """
<spref>
  <horizsys>
    <planar>
      <gridsys>
        <gridsysn>State Plane Coordinate System 1983</gridsysn>
        <spcs>
          <spcszone>3102</spcszone>
          <transmer>
            <sfctrmer>0.9999375</sfctrmer>
            <longcm>-76.5833333334</longcm>
            <latprjo>40</latprjo>
            <feast>250000</feast>
            <fnorth>0</fnorth>
          </transmer>
        </spcs>
      </gridsys>
      <planci>
        <plance>Coordinate Pair</plance>
        <coordrep>
          <absres>1</absres>
          <ordres>1</ordres>
        </coordrep>
        <plandu>US survey feet</plandu>
      </planci>
    </planar>
    <geodetic>
      <horizdn>North American Datum of 1983</horizdn>
      <ellips>Geodetic Reference System 80</ellips>
      <semiaxis>6378206</semiaxis>
      <denflat>294.9786982</denflat>
    </geodetic>
  </horizsys>
</spref>"""
    elif init == 'epsg:26718':  # NAD27 UTM zone 18N
        spref = """
<spref>
  <horizsys>
    <planar>
      <gridsys>
        <gridsysn>Universal Transverse Mercator</gridsysn>
        <utm>
          <utmzone>18</utmzone>
          <transmer>
            <sfctrmer>0.999600</sfctrmer>
            <longcm>-75.000000</longcm>
            <latprjo>0.000000</latprjo>
            <feast>500000.000000</feast>
            <fnorth>0.000000</fnorth>
          </transmer>
        </utm>
      </gridsys>
      <planci>
        <plance>coordinate pair</plance>
        <coordrep>
          <absres>0.000256</absres>
          <ordres>0.000256</ordres>
        </coordrep>
        <plandu>meters</plandu>
      </planci>
    </planar>
    <geodetic>
      <horizdn>North American Datum of 1927</horizdn>
      <ellips>Clarke 1866</ellips>
      <semiaxis>6378206.400000</semiaxis>
      <denflat>294.978698</denflat>
    </geodetic>
  </horizsys>
</spref>"""
    elif init == 'epsg:26918':  # NAD83 UTM zone 18N
        spref = """
<spref>
  <horizsys>
    <planar>
      <gridsys>
        <gridsysn>Universal Transverse Mercator</gridsysn>
        <utm>
          <utmzone>18</utmzone>
          <transmer>
            <sfctrmer>0.999600</sfctrmer>
            <longcm>-75.000000</longcm>
            <latprjo>0.000000</latprjo>
            <feast>500000.000000</feast>
            <fnorth>0.000000</fnorth>
          </transmer>
        </utm>
      </gridsys>
      <planci>
        <plance>coordinate pair</plance>
        <coordrep>
          <absres>0.000512</absres>
          <ordres>0.000512</ordres>
        </coordrep>
        <plandu>meters</plandu>
      </planci>
    </planar>
    <geodetic>
      <horizdn>North American Datum of 1983</horizdn>
      <ellips>Geodetic Reference System 80</ellips>
      <semiaxis>6378137.000000</semiaxis>
      <denflat>298.257222</denflat>
    </geodetic>
  </horizsys>
</spref>"""
    elif init == 'epsg:32618':  # WGS84 UTM zone 18N
        spref = """
<spref>
  <horizsys>
    <planar>
      <gridsys>
        <gridsysn>Universal Transverse Mercator</gridsysn>
        <utm>
          <utmzone>18</utmzone>
          <transmer>
            <sfctrmer>0.999600</sfctrmer>
            <longcm>-75.000000</longcm>
            <latprjo>0.000000</latprjo>
            <feast>500000.000000</feast>
            <fnorth>0.000000</fnorth>
          </transmer>
        </utm>
      </gridsys>
      <planci>
        <plance>coordinate pair</plance>
        <coordrep>
          <absres>0.000000</absres>
          <ordres>0.000000</ordres>
        </coordrep>
        <plandu>meters</plandu>
      </planci>
    </planar>
    <geodetic>
      <horizdn>D_WGS_1984</horizdn>
      <ellips>WGS_1984</ellips>
      <semiaxis>6378137.000000</semiaxis>
      <denflat>298.257224</denflat>
    </geodetic>
  </horizsys>
</spref>"""
    else:
        # The above covers many CRS in New York, but for any others,
        # we'll at least provide the EPSG code and proj4 definition
        crs = CRS.from_string(init)
        spref = """
<spref>
  <horizsys>
    <local>
      <localdes>{}</localdes>
      <localgeo>{}</localgeo>
    </local>
  </horizsys>
</spref>""".format(crs.to_string(), crs.to_wkt())
    if spref:
        # Replace any existing spref section, keeping FGDC element order.
        _remove_path(tree, './spref')
        _insert_after_last(tree, spref, 'idinfo|dataqual|spdoinfo')
    return tree
def test_epsg__no_code_available():
    """A custom Lambert conformal conic matches no EPSG code."""
    custom = CRS.from_string(
        "+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc "
        "+x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0")
    assert custom.to_epsg() is None
def test_epsg__not_found():
    """to_epsg() returns None when no match exists at the given confidence."""
    proj4 = "+proj=longlat +datum=WGS84 +no_defs"
    assert CRS(proj4).to_epsg(0) is None
    assert CRS.from_string(proj4).to_epsg() is None
def test_from_wkt():
    """Round-trip a CRS through WKT via the CRS() constructor."""
    # NOTE(review): another `test_from_wkt` is defined later in this file and
    # shadows this one, so only the later definition is collected by pytest —
    # consider renaming one of them.
    wgs84 = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs")
    from_wkt = CRS(wgs84.to_wkt())
    assert wgs84.to_wkt() == from_wkt.to_wkt()
def test_from_wkt():
    """Round-trip a CRS through WKT via the from_wkt() classmethod."""
    original = CRS.from_string("+proj=longlat +datum=WGS84 +no_defs")
    rebuilt = CRS.from_wkt(original.to_wkt())
    assert original.to_wkt() == rebuilt.to_wkt()
"""Reclassify ABoVE annual land-cover mosaics and write them back out with CRS."""
import glob
import os

import xarray as xr  # was missing: `xr` is used below but never imported (NameError)
import rioxarray  # noqa: F401 -- registers the DataArray `.rio` accessor used below
from pyproj import CRS
from rasterio.warp import reproject, Resampling

in_dir = '/data/ABOVE/LANDSAT/LANDCOVER/Annual_Landcover_ABoVE_1691/data/years/mosaic/'
out_dir = '/data/ABOVE/LANDSAT/LANDCOVER/Annual_Landcover_ABoVE_1691/data/years/reclassify/'

fnames = glob.glob(in_dir + '*Mosaic*.tif')
print(len(fnames))

counter = 0
for f in fnames:
    counter = counter + 1
    print(counter)
    da = xr.open_rasterio(f)
    # Convert the CRS from string
    cc = CRS.from_string(da.crs)
    # Reclassifying the dataset: collapse classes 6-10 down to 5-7
    da = xr.where(da == 6, 5, da)
    da = xr.where(da == 7, 6, da)
    da = xr.where(da == 8, 6, da)
    da = xr.where(da == 9, 6, da)
    da = xr.where(da == 10, 7, da)
    # xr.where drops the CRS attribute, so re-attach it before writing
    da.rio.write_crs(cc.to_string(), inplace=True)
    basename = os.path.basename(f)
    # NOTE(review): 'Recalssified_' is a typo, kept for backward-compatible
    # output filenames; fix together with downstream consumers.
    da.rio.to_raster(out_dir + 'Recalssified_' + basename, compress='lzw')
def test_epsg__not_found():
    """A CRS with explicit towgs84 shifts matches no EPSG code."""
    proj4 = "+proj=longlat +datum=WGS84 +no_defs +towgs84=0,0,0"
    assert CRS(proj4).to_epsg(0) is None
    assert CRS.from_string(proj4).to_epsg() is None
def test_epsg__no_code_available():
    """No EPSG code exists for this custom Lambert conformal conic."""
    proj4 = ("+lon_0=-95 +ellps=GRS80 +y_0=0 +no_defs=True +proj=lcc "
             "+x_0=0 +units=m +lat_2=77 +lat_1=49 +lat_0=0")
    assert CRS.from_string(proj4).to_epsg() is None
def coarse_grain_osm_network(G, tolerance=10, return_node_mapping=False):
    """
    Accepts an (unprojected) osmnx graph and coarse grains it. Tolerance is
    specified in meters.

    The coarse graining is performed as follows:
        1. Around each node, a circle of radius=tolerance meters is drawn.
        2. If any two (or more) nodes' circles overlap, they are coarse-grained
           into a single node.
        3. The edges are naturally rewired whenever one (or both) of its
           endpoints are coarse grained.
        4. If the previous step results in a self-loop, it is discarded.

    :param G: osmnx graph; reused as-is if already projected to UTM
    :param tolerance: merge radius in meters
    :param return_node_mapping: if True, also return {old node: new label}
    :returns: coarse-grained networkx Graph (and optionally the node mapping)
    """
    metadata = G.graph
    # Avoid re-projecting if the graph metadata says it is already UTM.
    if 'proj' in metadata and metadata['proj'] == 'utm':
        G_proj = G
    else:
        G_proj = ox.project_graph(G)
    gdf_nodes = ox.graph_to_gdfs(G_proj, edges=False)
    # Union of per-node buffers; overlapping buffers merge into one polygon.
    buffered_nodes = gdf_nodes.buffer(tolerance).unary_union
    old2new = dict(
    )  # key=old node label, value={'label': new_node_label, 'x': coord_x,
    #'y': coord_y, 'lat': ?, 'lon': ?}
    for node, data in G_proj.nodes(data=True):
        x, y = data['x'], data['y']
        # NOTE(review): lon/lat and osm_id are read but never used below.
        lon, lat = data['lon'], data['lat']
        osm_id = data['osmid']
        # Find the merged buffer polygon containing this node; its index
        # becomes the coarse-grained node label, its centroid the position.
        for poly_idx, polygon in enumerate(buffered_nodes):
            if polygon.contains(Point(x, y)):
                poly_centroid = polygon.centroid
                poly_centroid_latlon = utm_to_latlon(
                    XY(x=poly_centroid.x, y=poly_centroid.y),
                    utm_zone=CRS.from_string(
                        G_proj.graph['crs']).to_dict()['zone'])
                old2new[node] = dict(label=poly_idx,
                                     x=poly_centroid.x,
                                     y=poly_centroid.y,
                                     lon=poly_centroid_latlon.x,
                                     lat=poly_centroid_latlon.y)
                break
    H = nx.Graph()
    for node in G_proj.nodes():
        new_node_data = old2new[node]
        new_label = new_node_data['label']
        H.add_node(new_label, **new_node_data)
    # Rewire edges onto the merged labels; self-loops (u2 == v2) are dropped.
    for u, v, data in G_proj.edges(data=True):
        u2, v2 = old2new[u]['label'], old2new[v]['label']
        if u2 != v2:
            H.add_edge(u2, v2, **data)
    H.graph = {'crs': G_proj.graph['crs'], 'name': G_proj.graph['name']}
    if return_node_mapping is True:
        return H, {
            old_node: data['label']
            for old_node, data in old2new.items()
        }
    else:
        return H