def test_set_theoretic(self):
    """Exercise union/intersection/difference against the ``rect`` fixture.

    The counties layer is projected to Albers North America and areas are
    compared in square kilometres (GetArea() / 1e6) for the self.sf county.
    """
    counties = self.counties.transform(projection_from_string(ALBERS_N_AMERICA))
    subset = counties[[self.sf]]

    # set union and intersection with the rectangle fixture
    assert_almost_equal(subset.union(rect)[self.sf].GetArea() / 1e6,
                        183.026345584)
    assert_almost_equal(subset.intersection(rect)[self.sf].GetArea() / 1e6,
                        31.0072128793)

    # the three difference flavours share the same call shape
    expected_km2 = {"symmetric": 152.019132704,
                    "left": 95.5399654593,
                    "right": 56.4791672452}
    for kind, area_km2 in expected_km2.items():
        diff = subset.difference(rect, kind=kind)
        assert_almost_equal(diff[self.sf].GetArea() / 1e6, area_km2)
def distances(self, shp, proj=None):
    """Compute the euclidean distance from each shape in the layer to shp.

    If proj is not None, the coordinates are transformed into proj before
    the distances are computed.

    Note: if shp is a shapely object, it is up to the user to make sure
    shp is in the correct coordinate system.

    Parameters
    ----------
    shp: ogr.Geometry or shapely geometry
        The shape to measure distances to.
    proj: string or osr.SpatialReference (default=None)
        Valid strings are 'albers' or 'utm'. If None, no transformation
        of coordinates.

    Returns
    -------
    pandas.Series

    Raises
    ------
    ValueError
        If proj is a string other than 'utm' or 'albers'.

    Note
    ----
    'utm' should only be used for small polygons when centimeter level
    accuracy is needed. Otherwise the distance will be incorrect.
    Similar issues can happen when polygons cross utm boundaries.
    """
    if proj is None:
        # No re-projection: measure in the layer's native coordinates.
        shp = to_geometry(shp)
        return self.to_geometry(proj=proj).map(lambda x: x.Distance(shp))
    if proj == 'utm':
        # Consistent with areas(): compare proj4 strings, stripping the
        # trailing whitespace GDAL appends to ExportToProj4() output.
        if self.proj.ExportToProj4().strip() != ut.PROJ_WGS84:
            vl = self.transform(ut.projection_from_string())
        else:
            vl = self
        _shp = ops.transform(to_utm, to_shapely(shp))
        d = vl.to_shapely() \
              .map(lambda x: ops.transform(to_utm, x).distance(_shp))
        return pd.Series(d, index=self.index)
    elif proj == 'albers':
        proj = ut.projection_from_string(ut.ALBERS_N_AMERICA)
        shp = to_geometry(shp, copy=True, proj=proj)
        return self.to_geometry(proj=proj).map(lambda x: x.Distance(shp))
    # Previously an unknown proj silently returned None; fail loudly instead.
    raise ValueError("unsupported proj: %r" % (proj,))
def createCatalog(src, tile_path, dest, index_path=None, grid=None): hDataset = gdal.OpenShared(src) # Get projection. proj = hDataset.GetProjectionRef() # Dump to json catalog = {"Path": src, "CoordinateSystem": proj, "GeoTransform": hDataset.GetGeoTransform()} band = hDataset.GetRasterBand(1) ctable = band.GetColorTable() if ctable is not None: colors = [ctable.GetColorEntry(i) for i in range(256)] catalog["ColorTable"] = colors xsize = hDataset.RasterXSize ysize = hDataset.RasterYSize catalog["Size"] = (xsize, ysize) if tile_path is not None: if os.path.exists(tile_path): tiles = os.listdir(tile_path) if len(tiles) == 0: raise ValueError("%s is empty" % tile_path) tile = os.path.join(tile_path, tiles[0]) ds = gdal.OpenShared(tile) if ds is None: raise ValueError("Unable to open file: %s" % tile) xsize = ds.RasterXSize ysize = ds.RasterYSize if xsize != ysize: raise ValueError("tiles must have same X and Y size") catalog["GridSize"] = xsize catalog["Path"] = tile_path + '/' else: raise ValueError("tiles path does not exist: %s" % tile_path) if index_path is not None: read = read_geojson if index_path.endswith("json") else read_layer index = read(index_path) catalog["Index"] = index.transform(projection_from_string()).to_dict() if dest is not None: with open(dest, "w+b") as outf: outf.write(json.dumps(catalog)) else: print json.dumps(catalog)
def areas(self, proj=None):
    """Return the area of every shape in the layer as a pandas.Series.

    Parameters
    ----------
    proj: string or osr.SpatialReference (default=None)
        Valid strings are 'albers' or 'utm'. If None, areas are computed
        in the layer's native coordinates.

    Returns
    -------
    pandas.Series

    Note
    ----
    'utm' should only be used for small polygons when centimeter level
    accuracy is needed, otherwise the area will be incorrect.  Similar
    issues can happen when polygons cross utm boundaries.
    """
    if proj is None:
        # Native units of the current projection.
        return self.map(lambda geom: geom.GetArea())
    if proj == 'utm':
        # Re-project to WGS84 first unless the layer is already there.
        if self.proj.ExportToProj4().strip() != ut.PROJ_WGS84:
            layer = self.transform(ut.projection_from_string())
        else:
            layer = self
        square_meters = [ops.transform(to_utm, geom).area
                         for geom in layer.to_shapely()]
        return pd.Series(square_meters, index=self.index, name="area_sqr_m")
    elif proj == 'albers':
        albers = ut.projection_from_string(ut.ALBERS_N_AMERICA)
        return self.transform(albers).areas()
def to_dict(self, df=None):
    """Return a dictionary representation of the object.

    Based off the GeoJSON spec. Will transform the vector layer into
    WGS84 (EPSG:4326).

    Parameters
    ----------
    df: pandas.DataFrame (default=None)
        The dataframe to supply the properties of the features. The
        index of df must match the ids of the VectorLayer.

    Returns
    -------
    dict
    """
    # BUG FIX: ExportToProj4() returns a string while
    # ut.projection_from_string() returns an osr.SpatialReference, so the
    # old comparison was always True and the layer was re-projected even
    # when already in WGS84.  Compare proj4 strings (stripped) instead,
    # matching areas().
    if self.proj.ExportToProj4().strip() != ut.PROJ_WGS84:
        vl = self.transform(ut.projection_from_string())
    else:
        vl = self
    res = {"type": "FeatureCollection"}
    res["features"] = [to_feature(f, i).ExportToJson(as_object=True)
                       for i, f in enumerate(vl)]
    if df is not None:
        for i, f in zip(vl.ids, res["features"]):
            props = f["properties"]
            df_props = df.loc[i].to_dict()
            # Python 2: dict.items() returns lists, '+' merges them
            # (df values win on key collisions).
            f["properties"] = dict(props.items() + df_props.items())
            f["properties"]["__id__"] = i
    else:
        for i, f in zip(vl.ids, res["features"]):
            f["properties"]["__id__"] = i
    return res
def to_dict(self, df=None):
    """Return a dictionary representation of the object.

    Based off the GeoJSON spec. Will transform the vector layer into
    WGS84 (EPSG:4326).

    Parameters
    ----------
    df: pandas.DataFrame (default=None)
        The dataframe to supply the properties of the features. The
        index of df must match the ids of the VectorLayer.

    Returns
    -------
    dict
    """
    # BUG FIX: ExportToProj4() returns a string while
    # ut.projection_from_string() returns an osr.SpatialReference, so the
    # old comparison was always True and the layer was re-projected even
    # when already in WGS84.  Compare proj4 strings (stripped) instead,
    # matching areas().
    if self.proj.ExportToProj4().strip() != ut.PROJ_WGS84:
        vl = self.transform(ut.projection_from_string())
    else:
        vl = self
    res = {"type": "FeatureCollection"}
    res["features"] = [to_feature(f, i).ExportToJson(as_object=True)
                       for i, f in enumerate(vl)]
    if df is not None:
        for i, f in zip(vl.ids, res["features"]):
            props = f["properties"]
            df_props = df.loc[i].to_dict()
            # Python 2: dict.items() returns lists, '+' merges them
            # (df values win on key collisions).
            f["properties"] = dict(props.items() + df_props.items())
            f["properties"]["__id__"] = i
    else:
        for i, f in zip(vl.ids, res["features"]):
            f["properties"]["__id__"] = i
    return res
def from_series(geom_series, proj=None):
    """Create a VectorLayer from a pandas.Series object.

    If the geometries do not have a spatial reference, EPSG:4326 is
    assumed.

    Parameters
    ----------
    geom_series: pandas.Series
        The series object with shapely geometries
    proj: osr.SpatialReference
        The projection to use, defaults to EPSG:4326

    Returns
    -------
    VectorLayer
    """
    if proj is None:
        proj = ut.projection_from_string()
    geometries = geom_series.map(lambda shape: to_geometry(shape, proj=proj))
    return VectorLayer(geometries, proj=proj)
# Optionally describe the tile set: validate the tile directory and record
# the tile grid size in the catalog (tiles must be square).
if args.tile_path is not None:
    if os.path.exists(args.tile_path):
        tiles = os.listdir(args.tile_path)
        if len(tiles) == 0:
            raise ValueError("%s is empty" % args.tile_path)
        # Inspect the first tile to determine the grid size.
        tile = os.path.join(args.tile_path, tiles[0])
        ds = gdal.OpenShared(tile)
        if ds is None:
            raise ValueError("Unable to open file: %s" % tile)
        xsize = ds.RasterXSize
        ysize = ds.RasterYSize
        if xsize != ysize:
            raise ValueError("tiles must have same X and Y size")
        catalog["GridSize"] = xsize
        # NOTE(review): createCatalog() stores this path with a trailing
        # '/' — confirm which form consumers of "Path" expect.
        catalog["Path"] = args.tile_path
    else:
        raise ValueError("tiles path does not exist: %s" % args.tile_path)

# Optionally attach a vector index (GeoJSON when the path ends in "json",
# otherwise read as an OGR layer), re-projected to WGS84 for the catalog.
if args.index_path is not None:
    read = read_geojson if args.index_path.endswith("json") else read_layer
    index = read(args.index_path)
    catalog["Index"] = index.transform(projection_from_string()).to_dict()

# Emit the catalog: write to the destination file when given, else stdout.
if args.dest is not None:
    with open(args.dest, "w+b") as outf:
        outf.write(json.dumps(catalog))
else:
    print json.dumps(catalog)
# Validate the tile directory and record the tile grid size in the catalog
# (tiles must be square).
if os.path.exists(args.tile_path):
    tiles = os.listdir(args.tile_path)
    if len(tiles) == 0:
        raise ValueError("%s is empty" % args.tile_path)
    # Inspect the first tile to determine the grid size.
    tile = os.path.join(args.tile_path, tiles[0])
    ds = gdal.OpenShared(tile)
    if ds is None:
        raise ValueError("Unable to open file: %s" % tile)
    xsize = ds.RasterXSize
    ysize = ds.RasterYSize
    if xsize != ysize:
        raise ValueError("tiles must have same X and Y size")
    catalog["GridSize"] = xsize
    # NOTE(review): createCatalog() stores this path with a trailing '/' —
    # confirm which form consumers of "Path" expect.
    catalog["Path"] = args.tile_path
else:
    raise ValueError("tiles path does not exist: %s" % args.tile_path)

# Optionally attach a vector index (GeoJSON when the path ends in "json",
# otherwise read as an OGR layer), re-projected to WGS84 for the catalog.
if args.index_path is not None:
    read = read_geojson if args.index_path.endswith("json") else read_layer
    index = read(args.index_path)
    catalog["Index"] = index.transform(projection_from_string()).to_dict()

# Emit the catalog: write to the destination file when given, else stdout.
if args.dest is not None:
    with open(args.dest, "w+b") as outf:
        outf.write(json.dumps(catalog))
else:
    print json.dumps(catalog)