def test_nopoints():
    with pytest.raises(TypeError):
        shapely_to_ogr_type('Point')
    with pytest.raises(TypeError):
        shapely_to_ogr_type('MultiPoint')

Example #2
def get_raster_stats(rastername):
    raster_path = os.path.join(tdir, rastername)
    stats = raster_stats(geom.wkt, raster_path, stats="median mean sum")
    if any(val is None or math.isnan(val) for val in stats[0].values()):
        # fall back to the point centroid
        stats = raster_stats(geom.centroid.wkt, raster_path, stats="median mean sum")
    return stats[0]
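get_raster_stats above reads the module-level tdir and geom objects. A minimal usage sketch, where the directory, geometry, and raster name are all hypothetical:

from shapely.geometry import box

tdir = "/tmp/rasters"          # hypothetical directory of GeoTIFFs
geom = box(0, 0, 1000, 1000)   # hypothetical analysis polygon

result = get_raster_stats("slope.tif")  # hypothetical raster name
print(result["mean"])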
def test_range():
    polygons = os.path.join(DATA, 'polygons.shp')
    stats = raster_stats(polygons, raster, stats="range min max")
    for stat in stats:
        assert stat['range'] == stat['max'] - stat['min']
    ranges = [x['range'] for x in stats]
    # without min/max specified
    stats = raster_stats(polygons, raster, stats="range")
    assert 'min' not in stats[0]
    assert ranges == [x['range'] for x in stats]
def test_iterable_geoms_geo():  
    reader = shapefile.Reader(os.path.join(DATA, 'polygons.shp'))  
    geoms = (x.shape for x in reader.shapeRecords())
    stats = raster_stats(geoms, raster)
    assert len(stats) == 2
    assert stats[0]['count'] == 75
    assert stats[1]['count'] == 50
def test_iterable_geolike():  
    reader = shapefile.Reader(os.path.join(DATA, 'polygons.shp'))  
    geoms = [x.shape.__geo_interface__ for x in reader.shapeRecords()]
    stats = raster_stats(geoms, raster)
    assert len(stats) == 2
    assert stats[0]['count'] == 75
    assert stats[1]['count'] == 50
def test_categorical():
    polygons = os.path.join(DATA, 'polygons.shp')
    categorical_raster = os.path.join(DATA, 'slope_classes.tif') 
    stats = raster_stats(polygons, categorical_raster, categorical=True)
    assert len(stats) == 2
    assert stats[0][1.0] == 75
    assert 5.0 in stats[1]
def test_main():
    polygons = os.path.join(DATA, 'polygons.shp')
    stats = raster_stats(polygons, raster)
    for key in ['__fid__', 'count', 'min', 'max', 'mean']:
        assert key in stats[0]
    assert len(stats) == 2
    assert stats[0]['count'] == 75
    assert stats[1]['count'] == 50
def test_points():
    points = os.path.join(DATA, 'points.shp')
    stats = raster_stats(points, raster)
    # three features
    assert len(stats) == 3
    # three pixels
    assert sum([x['count'] for x in stats]) == 3
    assert round(stats[0]['mean'], 3) == 11.386
    assert round(stats[1]['mean'], 3) == 35.547
def test_points_categorical():
    points = os.path.join(DATA, 'points.shp')
    categorical_raster = os.path.join(DATA, 'slope_classes.tif') 
    stats = raster_stats(points, categorical_raster, categorical=True)
    # three features
    assert len(stats) == 3
    assert 'mean' not in stats[0]
    assert stats[0][1.0] == 1
    assert stats[1][2.0] == 1
def test_nodata_value():
    polygons = os.path.join(DATA, 'polygons.shp')
    categorical_raster = os.path.join(DATA, 'slope_classes.tif') 
    stats = raster_stats(polygons, categorical_raster, stats="*",
        categorical=True, nodata_value=1.0)
    assert stats[0]['majority'] is None
    assert stats[0]['count'] == 0  # no pixels; they're all null
    assert stats[1]['minority'] == 2.0
    assert stats[1]['count'] == 49 # used to be 50 if we allowed 1.0
    assert '1.0' not in stats[0]
Example #11
def drawSectors(center, radius, sectors, start, steps):
    end = 360 + start  # end of circle in degrees

    # prepare parameters
    if start > end:
        start -= 360

    step_angle_width = (end - start) / steps
    sector_width = (end - start) / sectors
    steps_per_sector = int(math.ceil(steps / sectors))

    global features
    features = []
    for x in xrange(0, int(sectors)):
        segment_vertices = []

        # first the center and first point
        segment_vertices.append(polar_point(center, 0, 0))
        segment_vertices.append(
            polar_point(center, start + x * sector_width, radius))

        # then the sector outline points
        for z in xrange(1, steps_per_sector):
            segment_vertices.append(
                (polar_point(center,
                             start + x * sector_width + z * step_angle_width,
                             radius)))

        # then again the center point to finish the polygon
        segment_vertices.append(
            polar_point(center, start + x * sector_width + sector_width,
                        radius))
        segment_vertices.append(polar_point(center, 0, 0))

        # create feature
        features.append(Polygon(segment_vertices))

    polys2 = gpd.GeoSeries(features)
    global df2
    df2 = gpd.GeoDataFrame({'geometry': polys2, 'id': range(int(sectors))})
    df2.to_file("./output/sectors3.shp")

    global res  #raster
    res = zonal_stats(df2, path)  #raster's path
    df_stat = gpd.GeoDataFrame(res)
    #print df_stat

    global res1  #raster
    res1 = raster_stats(df2, path, stats="*",
                        copy_properties=True)  #raster's path
    df_stat1 = gpd.GeoDataFrame(res1)
    print df_stat1
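drawSectors relies on a polar_point helper that is not shown here. A minimal sketch of what it likely does, assuming center is a shapely Point and angles are given in degrees measured clockwise from north:

import math

def polar_point(origin, angle, distance):
    # offset `origin` by `distance` map units at `angle` degrees (clockwise from north)
    return (origin.x + math.sin(math.radians(angle)) * distance,
            origin.y + math.cos(math.radians(angle)) * distance)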
def get_future_clim(pt):
    wkt = "POINT(%f %f)" % pt
    future_clim = []
    for variable in variables:
        year = 2060
        climate = "Ensemble_rcp60"
        path = raster_path(year, climate, variable[0])
        stats = raster_stats(wkt, path, stats="max")
        val = stats[0]['max']
        future_clim.append(val)

    return future_clim
def test_nonsense():
    polygons = os.path.join(DATA, 'polygons.shp')
    with pytest.raises(RasterStatsError):
        raster_stats("blaghrlargh", raster)
    with pytest.raises(RasterStatsError):
        raster_stats(polygons, "blercherlerch")
    with pytest.raises(RasterStatsError):
        raster_stats(["blaghrlargh",], raster)
def query_climate(pt):
    wkt = "POINT(%f %f)" % pt
    data = {}
    for variable in variables:
        data[variable[1]] = {}
        for climate in climates:
            data[variable[1]][climate] = []
            for year in years:
                path = raster_path(year, climate, variable[0])
                stats = raster_stats(wkt, path, stats="max")
                val = stats[0]['max']
                if "_tenths" in variable[0]:
                    val = val / 10.0
                data[variable[1]][climate].append(val)

    return data
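get_future_clim and query_climate both depend on a raster_path(year, climate, variable) helper plus module-level variables, climates, and years lists, none of which are shown. A minimal sketch, assuming a simple directory layout (the root path, file template, and list contents are hypothetical):

import os

RASTER_ROOT = "/data/climate"  # hypothetical root directory

variables = [("tmax_tenths", "max_temp"), ("prcp", "precip")]  # (raster name, label)
climates = ["Ensemble_rcp45", "Ensemble_rcp60"]
years = [1990, 2030, 2060]

def raster_path(year, climate, variable):
    # e.g. /data/climate/Ensemble_rcp60/2060/tmax_tenths.tif
    return os.path.join(RASTER_ROOT, climate, str(year), variable + ".tif")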
def test_iterable_features_geo():  
    # Grr, pyshp doesn't do feature-level __geo_interface__ so we need to construct it
    reader = shapefile.Reader(os.path.join(DATA, 'polygons.shp'))  
    features = []
    class FeatureThing(object):
        pass
    fields = reader.fields[1:]  
    field_names = [field[0] for field in fields]  
    for sr in reader.shapeRecords():
        geom = sr.shape.__geo_interface__  
        atr = dict(zip(field_names, sr.record))  
        obj = FeatureThing()
        obj.__geo_interface__ = dict(geometry=geom, properties=atr, type="Feature")
        features.append(obj)
    stats = raster_stats(features, raster)
    assert len(stats) == 2
    assert stats[0]['count'] == 75
    assert stats[1]['count'] == 50
def get_current_clims(pts):
    data = []
    wkts = ["POINT(%f %f)" % pt for pt in pts]
    for variable in variables:
        print variable[1]
        year = 1990
        climate = "Ensemble_rcp45"
        print "\tgetting raster stats..."
        path = raster_path(year, climate, variable[0])
        stats = raster_stats(wkts, path, stats="max")
        #vals = [x['max'] for x in stats if not math.isnan(x['max'])]
        vals = [x['max'] for x in stats]
        data.append(vals)

    # transpose
    print "Transposing..."
    data = map(list, zip(*data))
    return data
Example #17
def load_training_vector(response_shapes, explanatory_rasters, response_field, metric='mean'):
    """
    Parameters
    ----------
    response_shapes : Source of vector features for raster_stats;
                      can be an OGR file path or an iterable of geojson-like features
    explanatory_rasters : List of paths to GDAL rasters containing explanatory variables
    response_field : Field name containing the known response category (must be numeric)
    metric : Statistic used to aggregate explanatory data across line and polygon
             vector features. Defaults to 'mean' (optional)

    Returns
    -------
    train_xs : Array of explanatory variables
    train_ys : 1xN array of known responses
    """
    from rasterstats import raster_stats
    explanatory_dfs = []
    fldnames = []

    for i, raster in enumerate(explanatory_rasters):
        logger.debug("Rasters stats on %s" % raster)
        stats = raster_stats(response_shapes, raster, stats="mean", copy_properties=True)
        df = pd.DataFrame(stats, columns=["__fid__", response_field, metric])
        fldname = "%s_%d" % (metric, i)
        fldnames.append(fldname)

        df.rename(columns={metric: fldname,}, inplace=True)
        orig_count = df.shape[0]
        df = df[pd.notnull(df[fldname])]
        new_count = df.shape[0]
        if (orig_count - new_count) > 0:
            logger.warning('Dropping %d rows due to nans' % (orig_count - new_count))
        explanatory_dfs.append(df)

    current = explanatory_dfs[0]
    for frame in explanatory_dfs[1:]:
        current = current.merge(frame, on=['__fid__', response_field])

    train_ys = np.array(current[response_field])
    train_xs = np.array(current[fldnames])

    return train_xs, train_ys
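A minimal usage sketch for load_training_vector, assuming a polygon shapefile with a numeric 'response' field and two explanatory GeoTIFFs (all paths and field names are hypothetical):

rasters = ["explanatory/elevation.tif", "explanatory/precip.tif"]  # hypothetical rasters
train_xs, train_ys = load_training_vector("training/zones.shp", rasters,
                                          response_field="response")
# train_xs has one column per raster; train_ys holds the known categories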
Example #18
def cost_func(stand_wkt, TPA, VPA, SkidDist, Slope):
    cur_path = os.path.dirname(os.path.realpath(__file__))

    ### Input Data
    slope_raster = cur_path + '/data/Slope.tif'

    # Reproject
    source = osr.SpatialReference()
    source.ImportFromEPSG(4326)

    target = osr.SpatialReference()
    target.ImportFromEPSG(26913)

    transform = osr.CoordinateTransformation(source, target)

    stand_geom = ogr.CreateGeometryFromWkt(stand_wkt)
    stand_geom.Transform(transform)
    stand_wkt = stand_geom.ExportToWkt()

    ### Calculation
    area = stand_geom.GetArea()                                         # get area in m2
    area = round(area*0.000247105, 4)                                   # convert to acre and round
    if Slope is None:
        Slope = rasterstats.raster_stats(stand_wkt, slope_raster, stats=['mean'])
        Slope = Slope[0]['mean']                                        # %

    if SkidDist is None:
        SkidDist = skidding.skidDist(stand_wkt)                         # ft

    totalVolume = area*VPA                                              # ft3
    totalWeight = totalVolume*0.0195                                    # US short tons (lodepole pine weight)

    harvestCostFt3 = harvesting.harvestcost(Slope, SkidDist, TPA, VPA)  # $/ft3
    totalHarvestCost = round(harvestCostFt3*totalVolume)                # $
    harvestCostTon = totalHarvestCost/totalWeight                       # $/ton


    return Slope, SkidDist, harvestCostTon, totalHarvestCost
def test_global_non_ogr():
    reader = shapefile.Reader(os.path.join(DATA, 'polygons.shp'))  
    geoms = (x.shape for x in reader.shapeRecords())
    with pytest.raises(RasterStatsError):
        raster_stats(geoms, raster, global_src_extent=True)
Example #20
from rasterstats import raster_stats
import time


class Timer(object):
    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, *args):
        print "Time:", time.time() - self.start


states = '/data/workspace/rasterstats_blog/boundaries_contus.shp'
precip = '/data/workspace/rasterstats_blog/NA_Annual_Precipitation_GRID/NA_Annual_Precipitation/data/na_anprecip/hdr.adf'
with Timer():
    stats = raster_stats(states, precip, stats="*")
def test_doesnt_exist():
    nonexistent = os.path.join(DATA, 'DOESNOTEXIST.shp')
    with pytest.raises(RasterStatsError):
        raster_stats(nonexistent, raster)
def test_zonal_nodata():
    polygons = os.path.join(DATA, 'polygons.shp')
    stats = raster_stats(polygons, raster, nodata_value=0)
    assert len(stats) == 2
    assert stats[0]['count'] == 75
    assert stats[1]['count'] == 50
def test_all_touched():
    polygons = os.path.join(DATA, 'polygons.shp')
    stats = raster_stats(polygons, raster, all_touched=True)
    assert stats[0]['count'] == 95  # 75 if ALL_TOUCHED=False
    assert stats[1]['count'] == 73  # 50 if ALL_TOUCHED=False
def test_multipoints():
    multipoints = os.path.join(DATA, 'multipoints.shp')
    stats = raster_stats(multipoints, raster)
    assert len(stats) == 1
    assert stats[0]['count'] == 3
def test_lines():
    lines = os.path.join(DATA, 'lines.shp')
    stats = raster_stats(lines, raster)
    assert len(stats) == 2
    assert stats[0]['count'] == 58
    assert stats[1]['count'] == 32
Example #27
scaletif = sorted([
    i[i.find('clipNDVI'):i.find('.tif')]
        .replace('NDVI', 'N')
        .replace('clip', '')
        .replace('19', '')
        .replace('181', '18')[:-8]
    + '_' + i[i.find('clipNDVI'):i.find('.tif')][-1]
    for i in tif
])

for i, l in zip(polyshp, polydbf):  # two loops for each shape with different parameters on different layers
    source = ogr.Open(i, 1)  # add fields to each shape file
    layer = source.GetLayer()
    layer_defn = layer.GetLayerDefn()
    field_names = [layer_defn.GetFieldDefn(u).GetName()
                   for u in range(layer_defn.GetFieldCount())]

    for k in scaletif:
        new_mean = ogr.FieldDefn(k, ogr.OFTReal)  # add field names to the shape files
        layer.CreateField(new_mean)

    source = None

    for j, g in zip(tif, range(1, len(scaletif) + 1)):
        highstats = raster_stats(i, j, stats="mean", copy_properties=True)

        mymean = []  # get the mean from the dictionary
        for p in highstats:
            mymean.append(p.values()[0])

        # change the dbf file
        db = pysal.open(l, 'r')
        #d = {col: db.by_col(col) for col in db.header}
        #dtable = pd.DataFrame(d).fillna(0)
        #dtable[list(dtable.columns)[g]] = mymean
        meansave = []
        for t, u in zip(db, mymean):
            t[g] = u
            meansave.append(t)
def test_multilines():
    multilines = os.path.join(DATA, 'multilines.shp')
    stats = raster_stats(multilines, raster)
    assert len(stats) == 1
    # can differ slightly based on platform/gdal version
    assert stats[0]['count'] in [89, 90]
def test_partial_overlap():
    polygons = os.path.join(DATA, 'polygons_partial_overlap.shp')
    stats = raster_stats(polygons, raster, stats="count")
    for res in stats:
        # each polygon should have at least a few pixels overlap
        assert res['count'] > 0
def test_csv():
    polygons = os.path.join(DATA, 'polygons.shp')
    stats = raster_stats(polygons, raster, stats="*")
    csv = stats_to_csv(stats)
    assert csv.split()[0] == ','.join(sorted(VALID_STATS + ['__fid__']))
def test_no_overlap():
    polygons = os.path.join(DATA, 'polygons_no_overlap.shp')
    stats = raster_stats(polygons, raster, stats="count")
    for res in stats:
        # no polygon should have any overlap
        assert res['count'] is None
def test_alias():
    polygons = os.path.join(DATA, 'polygons.shp')
    stats = zonal_stats(polygons, raster)
    stats2 = raster_stats(polygons, raster)
    assert stats == stats2
    pytest.deprecated_call(raster_stats, polygons, raster)
def test_categorical_csv():
    polygons = os.path.join(DATA, 'polygons.shp')
    categorical_raster = os.path.join(DATA, 'slope_classes.tif') 
    stats = raster_stats(polygons, categorical_raster, categorical=True)
    csv = stats_to_csv(stats)
    assert csv.split()[0] == "1.0,2.0,5.0,__fid__"
Example #35
from rasterstats import raster_stats
from pprint import pprint
polys = "tests/data/multilines.shp"
raster = "tests/data/slope.tif"
pprint(raster_stats(polys, raster, stats="*"))