def setup_class(cls):
    """Load the county, CLU, and Illinois fixtures shared by this class."""
    cls.base_dir = os.path.abspath(os.path.dirname(__file__))

    counties_path = os.path.join(cls.base_dir, "data/vector",
                                 "bay_area_counties.geojson")
    cls.counties, cls.counties_df = vt.read_geojson(counties_path,
                                                    index="NAME")

    clu_rel_path = "data/vector/clu/clu_public_a_il189.shp"
    cls.clus, _ = vt.read_layer(os.path.join(cls.base_dir, clu_rel_path))

    il_geojson = os.path.join(cls.base_dir, "data/vector/il.geojson")
    cls.il, cls.il_df = vt.read_geojson(il_geojson, index="name")
def test_intersects():
    """Intersecting soils with the test shape must leave the shape valid."""
    shape, _ = vt.read_geojson(get_path('test_shape.json'))
    with open(get_path('test_soils.json')) as f:
        soils_json = f.read()
    soils, _ = vt.read_geojson(soils_json)
    soils.intersects(shape[0])
    assert shape[0].IsValid()
def read_catalog(dataset_catalog_filename_or_handle, workdir=None):
    """Build a RasterDataset from a JSON catalog.

    Parameters
    ----------
    dataset_catalog_filename_or_handle : str or opened file handle
        if str : Path to catalog file for the dataset.  May be relative
        or absolute.  Catalog files are in json format, and usually
        represent a type of data (e.g. CDL) and a year (e.g. 2014).
    workdir : str, optional
        When given, prepended to the catalog's "Path" entry.

    Returns
    -------
    RasterDataset

    See Also
    --------
    scripts/create_catalog.py : How to create a catalog file for a dataset.
    raster_query_test.py : Simple examples of exercising RasterQuery on
        tiled and untiled datasets, and computing stats from results.
    vector.py : Details of VectorLayer.
    """
    # Accept either a path or an already-open handle.
    if isinstance(dataset_catalog_filename_or_handle, string_types):
        with open(dataset_catalog_filename_or_handle) as catalog_file:
            decoded = json.load(catalog_file)
    else:
        decoded = json.load(dataset_catalog_filename_or_handle)

    dims = [int(v) for v in decoded["Size"]]
    srs_wkt = str(decoded["CoordinateSystem"])
    geo_transform = decoded["GeoTransform"]

    # Spatial reference for the raster, parsed from the stored WKT.
    proj = osr.SpatialReference()
    proj.ImportFromWkt(srs_wkt)

    if workdir is None:
        path = decoded["Path"]
    else:
        path = os.path.join(workdir, decoded["Path"])

    grid_size = decoded.get("GridSize", None)

    index = None
    tile_structure = None
    if "Index" in decoded:
        index, _index_df = read_geojson(json.dumps(decoded["Index"]),
                                        index="location")
        # Re-project the index into the raster's coordinate system.
        index = index.transform(proj)
    if "Tile_structure" in decoded:
        tile_structure = decoded["Tile_structure"]

    return RasterDataset(path, dims[0], dims[1],
                         geo_transform=geo_transform, proj=proj,
                         grid_size=grid_size, index=index,
                         tile_structure=tile_structure)
def test_vector_json_io_gs():
    """Round-trip a vector JSON file through Google Storage."""
    filepath = 'data/vector/test_shape.json'
    gs_path = 'gs://%s/%s' % (gs_bucket_name, filepath)

    # Upload the shape as JSON to the GS bucket.
    shape, _ = vt.read_geojson(os.path.join(base, filepath))
    shape.to_json(path=gs_path, precision=15)

    # Fetch it back and parse it again.
    fetched = vt.fetch_geojson(gs_path)
    geojson, _ = vt.read_geojson(fetched)

    # The round-tripped geometry must still be valid.
    assert geojson[0].IsValid()

    # Remove the uploaded object.
    delete_gs_key(filepath)
def __test_read():
    """Exercise read_layer/read_geojson on remote URLs, local files, strings."""
    url = ("http://www2.census.gov/geo/tiger/GENZ2014/shp/"
           "cb_2014_us_state_500k.zip")
    assert len(vt.read_layer(url)[0]) == 56
    assert len(vt.read_layer(get_path("cb_2014_us_state_500k.zip"))[0]) == 56

    sources = ["http://eric.clst.org/wupl/Stuff/gz_2010_us_040_00_500k.json",
               get_path("gz_2010_us_040_00_500k.json")]
    for src in sources:
        assert len(vt.read_geojson(src)[0]) == 52

    # Reading from an in-memory geojson string should behave the same way.
    geojson_str = open(sources[1]).read()
    assert len(vt.read_geojson(geojson_str)[0]) == 52
def setup_class(cls):
    """Load the shared vector fixtures and carve Farallon out of SF county."""
    cls.vl1, cls.df1 = vt.read_geojson(
        get_path("clu/four_shapes_2il_2ca.geojson"))
    cls.vl2, cls.df2 = vt.read_geojson(
        get_path("gz_2010_us_040_00_500k.json"))
    cls.counties, cls.df3 = vt.read_geojson(
        get_path("bay_area_counties.geojson"), index="NAME")
    cls.sf = "San Francisco"

    proj = projection_from_epsg()
    rect.AssignSpatialReference(proj)
    farallon.AssignSpatialReference(proj)
    # Remove the Farallon Islands from the San Francisco polygon.
    cls.counties[cls.sf] = cls.counties[cls.sf].Difference(farallon)

    cls.zips, cls.df4 = vt.read_geojson(get_path("bay_area_zips.geojson"),
                                        index="ZCTA5CE10")
    cls.df = pickle.load(open(get_path("clu/four_shapes_2il_2ca.p")))

    assert isinstance(cls.counties, vt.VectorLayer)
    assert isinstance(cls.counties["San Francisco"], ogr.Geometry)
def test_shapes_outside_raster_should_be_filtered(self):
    """Query 2 IL shapes (in range) plus 2 CA shapes (out of range)."""
    vl_outside, _ = read_geojson(
        get_path("vector/clu/four_shapes_2il_2ca.geojson"))
    assert len(vl_outside.keys()) == 4

    rd = read_catalog(get_path("../catalog/cdl_2014.json"))
    df = self.make_dataframe(rd.query(vl_outside))
    assert len(df.index) == 4

    # Out-of-range shapes yield all-zero rows; in-range rows sum to 1.
    sums = df.sum(axis=1).map(int)
    assert df[sums < 1e-6].shape[0] == 2
    assert df[(sums - 1).map(np.abs) < 1e-6].shape[0] == 2
def read_catalog(dataset_catalog_file):
    """Take a catalog file and create a raster dataset

    Parameters
    ----------
    dataset_catalog_file : str
        Path to catalog file for the dataset.  May be relative or
        absolute.  Catalog files are in json format, and usually
        represent a type of data (e.g. CDL) and a year (e.g. 2014).

    Returns
    -------
    RasterDataset

    See Also
    --------
    scripts/create_catalog.py : How to create a catalog file for a dataset.
    raster_query_test.py : Simple examples of exercising RasterQuery on
        tiled and untiled datasets, and computing stats from results.
    vector.py : Details of VectorLayer.
    """
    with open(dataset_catalog_file) as catalog_file:
        decoded = json.load(catalog_file)

    # List comprehension (not map) so the result is indexable below on
    # both Python 2 and Python 3, where map returns a lazy iterator.
    size = [int(x) for x in decoded["Size"]]
    coordinate_system = str(decoded["CoordinateSystem"])
    transform = decoded["GeoTransform"]

    # Get the projection for the raster
    proj = osr.SpatialReference()
    proj.ImportFromWkt(coordinate_system)

    path = decoded["Path"]
    grid_size = decoded.get("GridSize", None)

    index = None
    tile_regex = None
    if "Index" in decoded:
        index, index_df = read_geojson(json.dumps(decoded["Index"]),
                                       index="location")
        index = index.transform(proj)
        # Raw string so the "\." escape reaches the regex engine intact;
        # a plain string makes it an invalid string escape (warns on 3.6+).
        tile_regex = re.compile(r'([0-9]+)_([0-9]+)\.tif')

    return RasterDataset(path, size[0], size[1], transform, proj,
                         grid_size=grid_size, index=index,
                         tile_regex=tile_regex)
def test_co_ne_border(self):
    """Query CLU shapes on the CO/NE border; the bad catalog must raise."""
    clus, _ = read_layer(get_path("vector/clu/clu_public_a_co095.shp"))
    co_ne_border, _ = read_geojson(get_path("vector/co_ne_border.geojson"))
    clus = clus.within(co_ne_border.bbox())

    # The good catalog should be queryable without raising.
    rd = read_catalog(get_path("../catalog/co_soil.json"))
    for r in rd.query(clus):
        r  #compute_stats(r.values, r.weights)

    # The bad catalog must fail with an IndexError while iterating.
    rd = read_catalog(get_path("../catalog/co_soil_bad.json"))
    failed = False
    try:
        for r in rd.query(clus):
            r  #compute_stats(r.values, r.weights)
    except IndexError:
        failed = True
    assert failed
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import os from pyspatial.vector import read_geojson, to_geometry from pyspatial.raster import read_raster import numpy as np from numpy.testing import assert_array_almost_equal from test_raster_query import compute_stats base_dir = os.path.abspath(os.path.dirname(__file__)) get_path = lambda x: os.path.join(base_dir, "data", x) vl, vldf = read_geojson(get_path("vector/small_polygon.geojson")) rd = read_raster(get_path("raster/95000_45000.tif")) shp = vl[0] def test_small_polygon(): bboxes = vl.boundingboxes() shp_px = rd.to_pixels(bboxes)[0] grid = rd.to_geometry_grid(*shp_px.bounds) areas = {} for i, b in grid.iteritems(): diff = b.Intersection(to_geometry(shp, proj=rd.proj)) areas[i] = diff.GetArea()
def test_to_json():
    """Serializing with high precision must round-trip to a valid shape."""
    shape, _ = vt.read_geojson(get_path('test_shape.json'))
    # This shape requires more precision when serializing
    serialized = shape.to_json(precision=15)
    round_tripped, _ = vt.read_geojson(serialized)
    assert round_tripped[0].IsValid()
if len(tiles) == 0: raise ValueError("%s is empty" % args.tile_path) tile = os.path.join(args.tile_path, tiles[0]) ds = gdal.OpenShared(tile) if ds is None: raise ValueError("Unable to open file: %s" % tile) xsize = ds.RasterXSize ysize = ds.RasterYSize if xsize != ysize: raise ValueError("tiles must have same X and Y size") catalog["GridSize"] = xsize catalog["Path"] = args.tile_path else: raise ValueError("tiles path does not exist: %s" % args.tile_path) if args.index_path is not None: if args.index_path.endswith("json"): index, index_df = read_geojson(args.index_path) else: index = read_layer(args.index_path) catalog["Index"] = index.transform(projection_from_string()).to_dict() if args.dest is not None: with open(args.dest, "w+b") as outf: outf.write(json.dumps(catalog, indent=4, sort_keys=True)) else: print json.dumps(catalog, indent=4, sort_keys=True)