Code example #1
import math

import numpy as np
import geopandas as gpd
from osgeo import gdal, ogr

# `mpd`, `terrain_min`, `floodFill` and `write_array` are helpers from the
# surrounding module and are not shown in this snippet.


def define_terrain_defunct(width, length):
    """Build a terrain grid, flood-fill from its minimum, polygonise the flooded
    mask into a temporary shapefile and load it with GeoPandas."""
    n = math.ceil(math.log(width, 2))
    data = mpd(n)
    dem = data[0:width, 0:length]

    # seed the flood fill at the terrain minimum; the mask marks below-average cells
    x, y = terrain_min(dem)
    mask = np.where(dem < dem.mean(), 1, 0)
    flood = floodFill(x, y, mask)

    # temporary raster/shapefile names
    out_number = str(np.random.randint(low=100000, high=999999))
    out_raster = 'temp/' + out_number
    out_shp = 'temp/' + out_number + '.shp'

    write_array(flood, out_raster)

    ds = gdal.Open(out_raster + '.tif')
    band = ds.GetRasterBand(1)

    # polygonise the flooded mask into a new shapefile layer with a "DN" value field
    drv = ogr.GetDriverByName("ESRI Shapefile")

    out = drv.CreateDataSource(out_shp)
    out_layer = out.CreateLayer('terrains', srs=None)

    fd = ogr.FieldDefn("DN", ogr.OFTInteger)
    out_layer.CreateField(fd)

    gdal.Polygonize(band, None, out_layer, 0, [], callback=None)
    out = None  # close the datasource so the shapefile is flushed to disk

    gdf = gpd.read_file(out_shp)
    return band
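The heart of the function above is the raster-to-vector step. A minimal, self-contained sketch of that pattern using GDAL/OGR in-memory drivers (the array, layer and field names here are made up for illustration):

import numpy as np
from osgeo import gdal, ogr

# tiny 0/1 mask standing in for the flood array above
mask = np.zeros((4, 4), dtype=np.uint8)
mask[1:3, 1:3] = 1

mem_ds = gdal.GetDriverByName('MEM').Create('', mask.shape[1], mask.shape[0], 1, gdal.GDT_Byte)
mem_ds.GetRasterBand(1).WriteArray(mask)

vec = ogr.GetDriverByName('Memory').CreateDataSource('mem')
layer = vec.CreateLayer('mask', srs=None)
layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))

gdal.Polygonize(mem_ds.GetRasterBand(1), None, layer, 0, [], callback=None)
print(layer.GetFeatureCount())  # one polygon per connected region of equal pixel value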
Code example #2
import geopandas as gpd

def load_geojson_state(df_uri):
    """Read a GeoJSON file (path or URI) into a GeoDataFrame."""
    df = gpd.read_file(df_uri)
    return df
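A quick round-trip check of the loader (the file name and toy geometries below are made up):

from shapely.geometry import Point

gpd.GeoDataFrame({'name': ['a', 'b']},
                 geometry=[Point(0, 0), Point(1, 1)],
                 crs='EPSG:4326').to_file('states_demo.geojson', driver='GeoJSON')

states = load_geojson_state('states_demo.geojson')
print(states.crs, len(states))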
Code example #3
import os
import urllib

from shapely import geometry
import geopandas as gpd
import matplotlib.pyplot as plt
# from main import *   # DB_panos and pano_dir used below come from this module
from PIL import Image

route = gpd.read_file("../output/for_Presentation_留仙洞.geojson")
points = gpd.GeoDataFrame(route[['RID']].merge(DB_panos, on='RID'))

# points.query( "DIR != 0" ).reset_index().to_file( '../output/points_liuxiandong_presetation.geojson', driver="GeoJSON" )
points.query("DIR != 0", inplace=True)

points.info()



def draw_polygon_by_bbox(bbox=[113.93306, 22.57437, 113.9383, 22.58037]):
    # build the rectangular outline of the area of interest from the bbox corners
    from shapely.geometry import LineString

    coords = [bbox[:2], [bbox[0], bbox[3]],
              bbox[2:], [bbox[2], bbox[1]], bbox[:2]]

    area = gpd.GeoDataFrame([{'name': 'presentation area', 'geometry': LineString(coords)}])
    return area


def get_staticimage(id, heading, folder=pano_dir):
    file_name = f"{folder}/{id}.jpg"
    if os.path.exists(file_name):
        # image already cached locally; the download branch is not part of this snippet
        return file_name
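A quick sanity check of `draw_polygon_by_bbox` (the printed bounds are just its default bbox):

area = draw_polygon_by_bbox()
print(area.geometry.iloc[0].bounds)  # (113.93306, 22.57437, 113.9383, 22.58037)
area.plot()   # rectangular outline of the presentation area
plt.show()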
Code example #4
import logging

import pandas as pd
import geopandas as gpd

logger = logging.getLogger(__name__)  # assumed: the snippet uses `logger` but its setup is not shown


def clean_complaints(data):
    # hypothetical signature; the snippet starts mid-function and the original def line is not shown
    data['complaint_date'] = pd.to_datetime(data['complaint_date'], format='%Y%m%dT%H:%M:%S.%f')
    data['complaint_year'] = data.complaint_date.dt.year
    data = data.drop(['age_of_complainant', 'assignment', 'case_type', 'complaint_month',
                      'complaint_day', 'complaint_hour', 'current_status', 'finding_code',
                      'police_shooting', 'sex_of_complainant'], axis=1)
    return data


def create_shapefile(fileloc):
    """
    Reads the shapefile at the given file location and returns it as a GeoDataFrame.

    :param fileloc: file path of the shapefile
    :return: GeoDataFrame read from the shapefile
    """
    logger.info('Importing shapefile')

    shapefile = gpd.read_file(fileloc)
    shapefile['beat_num'] = pd.to_numeric(shapefile['beat_num'])
    print(type(shapefile))
    return shapefile


def add_race(shapefile, data):
    """
    Appends aggregated race tabulations of police complaints to a beats-level dataframe.

    :param shapefile: basic beats shapefile (GeoDataFrame)
    :param data: cleaned and processed police complaints dataset
    :return: police beats shapefile with the aggregated race dataset appended
    """
    logger.info('Add race aggregates to shapefile')

    race = data[['beat', 'race_of_complainant']].dropna()
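`add_race` is cut off above; for orientation, here is a minimal, self-contained sketch of the tabulate-and-merge pattern its docstring describes (toy data; column names follow the snippet, the real aggregation may differ):

import pandas as pd
import geopandas as gpd
from shapely.geometry import Point

beats = gpd.GeoDataFrame({'beat_num': [1, 2]},
                         geometry=[Point(0, 0).buffer(1), Point(3, 0).buffer(1)])
complaints = pd.DataFrame({'beat': [1, 1, 2],
                           'race_of_complainant': ['A', 'B', 'A']})

race = complaints[['beat', 'race_of_complainant']].dropna()
counts = (race.groupby(['beat', 'race_of_complainant'])
              .size().unstack(fill_value=0).reset_index())
merged = beats.merge(counts, left_on='beat_num', right_on='beat', how='left')
print(merged.drop(columns='geometry'))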
Code example #5
from osgeo import gdal, ogr, osr
import geopandas as gpd
import matplotlib.pyplot as plt
from matplotlib import cm

# `ds` (an open GDAL dataset), `band` (one of its raster bands) and the X, Y, data
# arrays plotted below come from earlier code that is not part of this snippet.

drv = ogr.GetDriverByName("ESRI Shapefile")

# carry the raster's spatial reference over to the output layer
srs = osr.SpatialReference()
srs.ImportFromWkt(ds.GetProjectionRef())

out = drv.CreateDataSource('test.shp')
out_layer = out.CreateLayer('new', srs=srs)

fd = ogr.FieldDefn("DN", ogr.OFTInteger)
out_layer.CreateField(fd)

gdal.Polygonize(band, None, out_layer, 0, [], callback=None)
out = None  # close the datasource so the shapefile is flushed to disk

gdf = gpd.read_file('test.shp')

# set up plots: one 3D surface view plus two shared-axis 2D panels
fig1 = plt.figure(1)
ax1 = fig1.add_subplot(111, projection='3d')

fig2, axes = plt.subplots(1, 2, sharex=True, sharey=True)
ax2, ax3 = axes

ax1.plot_surface(X,
                 Y,
                 data,
                 cmap=cm.coolwarm,
                 rstride=1,
                 cstride=1,
                 linewidth=0)
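One way the polygonised vectors and the underlying raster could go onto the 2D axes prepared above (a sketch only; the original plotting calls are not shown):

gdf.plot(column='DN', ax=ax2)       # polygons coloured by the DN value written by Polygonize
ax3.imshow(data, cmap=cm.coolwarm)  # raster view of the same data for comparison
plt.show()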
Code example #6
import geopandas as gpd
import mapclassify
from libpysal import examples

# Imports above are assumed for this snippet: `classify` is mapclassify's
# pick-a-scheme-by-name wrapper, and `_assertions` is a comparison helper
# defined elsewhere in the test module (not shown here).
from mapclassify import classify


def test_classify():
    # data
    link_to_data = examples.get_path('columbus.shp')
    gdf = gpd.read_file(link_to_data)
    x = gdf['HOVAL'].values

    # box_plot
    a = classify(x, 'box_plot')
    b = mapclassify.BoxPlot(x)
    _assertions(a, b)

    # EqualInterval
    a = classify(x, "EqualInterval", k=3)
    b = mapclassify.EqualInterval(x, k=3)
    _assertions(a, b)

    # FisherJenks
    a = classify(x, "FisherJenks", k=3)
    b = mapclassify.FisherJenks(x, k=3)
    _assertions(a, b)

    # FisherJenksSampled
    a = classify(x, "FisherJenksSampled", k=3, pct_sampled=0.5, truncate=False)
    b = mapclassify.FisherJenksSampled(x, k=3, pct=0.5, truncate=False)
    _assertions(a, b)

    # headtail_breaks
    a = classify(x, 'headtail_breaks')
    b = mapclassify.HeadTailBreaks(x)
    _assertions(a, b)

    # quantiles
    a = classify(x, 'quantiles', k=3)
    b = mapclassify.Quantiles(x, k=3)
    _assertions(a, b)

    # percentiles
    a = classify(x, 'percentiles', pct=[25, 50, 75, 100])
    b = mapclassify.Percentiles(x, pct=[25, 50, 75, 100])
    _assertions(a, b)

    # JenksCaspall
    a = classify(x, 'JenksCaspall', k=3)
    b = mapclassify.JenksCaspall(x, k=3)
    _assertions(a, b)

    # JenksCaspallForced
    a = classify(x, 'JenksCaspallForced', k=3)
    b = mapclassify.JenksCaspallForced(x, k=3)
    _assertions(a, b)

    # JenksCaspallSampled
    a = classify(x, 'JenksCaspallSampled', pct_sampled=0.5)
    b = mapclassify.JenksCaspallSampled(x, pct=0.5)
    _assertions(a, b)

    # natural_breaks
    a = classify(x, 'natural_breaks')
    b = mapclassify.NaturalBreaks(x)
    _assertions(a, b)

    # max_p
    a = classify(x, 'max_p', k=3, initial=50)
    b = mapclassify.MaxP(x, k=3, initial=50)
    _assertions(a, b)

    # std_mean
    a = classify(x, 'std_mean', multiples=[-1, -0.5, 0.5, 1])
    b = mapclassify.StdMean(x, multiples=[-1, -0.5, 0.5, 1])
    _assertions(a, b)

    # user_defined
    a = classify(x, 'user_defined', bins=[20, max(x)])
    b = mapclassify.UserDefined(x, bins=[20, max(x)])
    _assertions(a, b)
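For reference, a minimal standalone run of the same wrapper outside the test harness (assuming `classify` here is mapclassify's top-level `classify` helper, as the parallel calls above suggest; the sample array is made up):

import numpy as np
import mapclassify

y = np.array([1, 2, 3, 10, 20, 30, 100, 200, 300])
print(mapclassify.classify(y, 'EqualInterval', k=3).bins)  # scheme selected by name
print(mapclassify.EqualInterval(y, k=3).bins)              # same bins via the class interface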