def polygon2geohash(polygon, precision=6, coarse_precision=None, inner=True):
    # convert the (Earth Engine) geometry to a shapely shape
    polygon = geometry.shape(polygon.toGeoJSON())
    if coarse_precision is None:
        geohashes = polygon_to_geohashes(polygon, precision=precision, inner=inner)
    else:
        # cover the polygon at a coarser precision, then expand each cell to the
        # target precision by appending every possible geohash character
        geohashes = polygon_to_geohashes(polygon, precision=coarse_precision, inner=inner)
        curr_precision = coarse_precision
        while curr_precision < precision:
            geohashes = [a + b for a, b in product(geohashes, GEOHASH_CHARACTERS)]
            curr_precision += 1
    return sorted(list(geohashes))
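A minimal sketch of the coarse-to-fine expansion step used above, shown in isolation. It assumes GEOHASH_CHARACTERS is the 32-character geohash base32 alphabet and that product comes from itertools; neither definition appears in the snippet itself.

from itertools import product

# Assumed definition: the 32 characters valid in a geohash (base32 without a, i, l, o).
GEOHASH_CHARACTERS = "0123456789bcdefghjkmnpqrstuvwxyz"

# Start from a coarse cover (precision 4) and expand to precision 6 by appending
# every possible character; each step multiplies the candidate set by 32.
geohashes = {"9q8y", "9q8z"}
curr_precision, precision = 4, 6
while curr_precision < precision:
    geohashes = [a + b for a, b in product(geohashes, GEOHASH_CHARACTERS)]
    curr_precision += 1
print(len(geohashes))  # 2 * 32 * 32 = 2048 candidate cells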
def random_choose_geohash(row):
    # generate the geohashes covering the location polygon
    geohashes = list(polygon_to_geohashes(row, 6, False))
    # assign one geohash to the user/driver at random
    geohash = random.choice(geohashes)
    return geohash
def test_one_geohash(self):
    test_geohash = "x1"
    test_polygon = geohash_to_polygon(test_geohash)
    polygon = geohashes_to_polygon(polygon_to_geohashes(test_polygon, 2))
    self.assertEqual(test_polygon.area, polygon.area)
    self.assertTrue(test_polygon.covers(test_polygon.intersection(polygon)))
    self.assertTrue(test_polygon.intersection(polygon).covers(test_polygon))
def test_triangle(self):
    test_polygon = geometry.Polygon([(-99.1795917, 19.432134),
                                     (-99.1656847, 19.429034),
                                     (-99.1776492, 19.414236),
                                     (-99.1795917, 19.432134)])
    polygon = geohashes_to_polygon(polygon_to_geohashes(test_polygon, 7))
    self.assertTrue(polygon.area <= test_polygon.area)
    self.assertTrue(
        all(polygon.covers(geometry.Point(c)) for c in polygon.boundary.coords))
    polygon = geohashes_to_polygon(polygon_to_geohashes(test_polygon, 7, False))
    self.assertTrue(polygon.area >= test_polygon.area)
def index():
    if request.method == 'POST':
        nelat = float(request.form["nelat"])
        nelng = float(request.form["nelng"])
        swlat = float(request.form["swlat"])
        swlng = float(request.form["swlng"])
        precision = int(request.form["precision"])
        polygon = geometry.Polygon([[swlng, nelat], [nelng, nelat],
                                    [nelng, swlat], [swlng, swlat],
                                    [swlng, nelat]])
        inner_geohashes_polygon = polygon_to_geohashes(polygon, precision)
        f = open('file.txt', 'w')
        maps.total_results = []
        for x in np.arange(swlat, nelat, 0.01):
            for y in np.arange(swlng, nelng, 0.01):
                maps.get_nearby_places(str(x) + ',' + str(y), 'residential', '')
        for i in maps.total_results:
            f.write("%s\n" % str(i))
        total_results = maps.total_results
        df0 = gpd.GeoDataFrame({
            'location': total_results,
            'value': np.random.rand(len(total_results))
        })
        df0['geohash'] = df0['location'].apply(
            lambda l: geohash.encode(l[0], l[1], precision))
        df0['geometry'] = df0['geohash'].apply(geohash_to_polygon)
        df0.crs = {'init': 'epsg:4326'}
        overlay0 = df0.groupby('geohash').value.nunique()
        lat, lng = (nelat, nelng)
        m = folium.Map((lat, lng), zoom_start=12)
        folium.Choropleth(geo_data=df0,
                          name='choropleth',
                          data=overlay0,
                          columns=['geohash', 'value'],
                          key_on='feature.properties.geohash',
                          fill_color='BuPu',
                          fill_opacity=0.3,
                          line_opacity=0.2,
                          legend_name='Residential Areas').add_to(m)
        m.save('templates/map.html')
        return render_template("map.html")
    return render_template("prog.html")
def get_locusthub_df(csv_path, geojson_path):
    '''
    clean up raw csv data from locusthub data and subset to only geojson region
    '''
    df = pd.read_csv(csv_path)
    df['date'] = pd.to_datetime(df.STARTDATE)

    # build the set of precision-5 geohashes covering the country polygon
    country = json.load(open(geojson_path))['features'][0]
    polygon = ee.Geometry.Polygon(country['geometry']['coordinates'])
    polygon = geometry.shape(polygon.toGeoJSON())
    geohashes = polygon_to_geohashes(polygon, precision=5, inner=True)
    geohashes_country = sorted(list(geohashes))

    # encode each record's (latitude, longitude) at the same precision and keep
    # only records that fall inside the country cover
    df['gh'] = df[['Y', 'X']].apply(lambda x: geohash.encode(*x, precision=5),
                                    axis=1).values
    df = df.loc[df.STARTDATE > '2016-01-01'].loc[df['gh'].isin(geohashes_country)]
    df = df[['gh', 'Y', 'X', 'date']]
    return df
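A brief sketch (an illustration, not part of the original file) of why the point encoding and the polygon cover above must share the same precision: the isin() filter only matches when both sides produce 5-character geohashes. The coordinates below are an arbitrary example point.

import geohash  # python-geohash: encode() takes (latitude, longitude)

lat, lon = 9.03, 38.74           # Y = latitude, X = longitude
gh5 = geohash.encode(lat, lon, precision=5)
gh6 = geohash.encode(lat, lon, precision=6)

geohashes_country = {gh5}        # pretend cover built at precision=5
print(gh5 in geohashes_country)  # True: same precision, strings match
print(gh6 in geohashes_country)  # False: a 6-character hash never equals a 5-character one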
def identifyLocations(f):
    from polygon_geohasher.polygon_geohasher import polygon_to_geohashes
    from shapely import geometry

    if 'geometry' not in f:
        return None
    geom = f['geometry']
    if 'type' not in geom:
        return None
    geometry_type = geom['type']
    if geometry_type != 'Polygon':
        print('Unknown geometry type:', geometry_type)
        return None
    poly_coords = geom['coordinates'][0]
    return list(
        polygon_to_geohashes(geometry.Polygon(poly_coords), GEOHASH_ACCURACY, False))
if len(args) >= 5:
    if args[4] == "--points":
        points = True

fences = read_fences()
for precision in range(int(precision_min), int(precision_max) + 1):
    geohash_num = []
    p_area = []
    for fence in fences:
        polygon = geometry.Polygon(fence)
        real_area = area(create_geojson(mapping(polygon)["coordinates"][0])) / 1000000
        geohashes_polygon = polygon_to_geohashes(polygon, precision, False)
        polygon = geohashes_to_polygon(geohashes_polygon)
        perc_area = 100 * (area(create_geojson(
            mapping(polygon)["coordinates"][0])) / 1000000) / real_area
        p_area.append(perc_area)
        geohash_num.append(len(compress(list(geohashes_polygon))))
        if points:
            ps = []
            for hash in compress(list(geohashes_polygon)):
                bbox = geohash.bbox(hash)
                lat1 = bbox['s']
                lat2 = bbox['n']
                lon1 = bbox['w']
'''.format(column=code, naturalearth=schema, admin0=table, iso_a3=code3,
           iso_a2=code2, path_geojson=GEOJSON_PATH))

geojsonpath = os.path.join(GEOJSON_PATH, '{}.geojson'.format(code2))
print(geojsonpath)
with open(geojsonpath, 'r', encoding='utf-8') as infile:
    data = infile.read()
g1 = geojson.loads(data)
g2 = shape(g1)
inner_geohashes_polygon = polygon_to_geohashes(g2, 4)
outer_geohashes_polygon = polygon_to_geohashes(g2, 4, False)
tree = {}
words = []
print(inner_geohashes_polygon)
for geohash in inner_geohashes_polygon:
    words.append(geohash)
for geohash in outer_geohashes_polygon:
    words.append(geohash)
make_tree(tree, words)
write_tree(tree, code2, '/tmp/trees')
with open("foo.json") as fp: feature = json.load(fp) ## READ: https://github.com/hkwi/python-geohash/wiki/GeohashReference print('Coords:', feature["geometry"]["coordinates"][0][0][1], feature["geometry"]["coordinates"][0][0][0]) h = geohash.encode(feature["geometry"]["coordinates"][0][0][1], feature["geometry"]["coordinates"][0][0][0]) print('Geohash for coordinates[0][0]:', h) h = h[0:3] print('Exact coordinates for:', h, geohash.decode_exactly(h)) print('Bbox for:', h, geohash.bbox(h)) from polygon_geohasher.polygon_geohasher import polygon_to_geohashes from shapely import geometry print( polygon_to_geohashes( geometry.Polygon(feature["geometry"]["coordinates"][0]), 3, False)) ### # #print 'Geohash for 42.6, -5.6:', geohash.encode(42.6, -5.6) # #print 'Coordinate for Geohash ezs42:', geohash.decode('ezs42') # #print 'Exact coordinate for Geohash ezs42:\n', geohash.decode_exactly('ezs42')
def __init__(self, geo):
    print(geo)
    poly = shapely.geometry.shape(geo.geometry)
    self.hashes = polygon_to_geohashes(poly, 3, inner=False)
    self.geo = geo
if __name__ == '__main__':
    from polygon_geohasher.polygon_geohasher import polygon_to_geohashes, geohashes_to_polygon
    from shapely import geometry

    polygon = geometry.Polygon([(-39.1795917, 19.432134), (-39.1656847, 19.429034),
                                (-39.1776492, 19.414236), (-39.1795917, 19.432134)])
    inner_geohashes_polygon = geohashes_to_polygon(polygon_to_geohashes(polygon, 7))
    print(inner_geohashes_polygon)
    outer_geohashes_polygon = geohashes_to_polygon(
        polygon_to_geohashes(polygon, 7, False))
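A hedged follow-up to the demo above (a sketch, not from the original snippet): the inner cover (default inner=True) stays inside the polygon, while the outer cover (inner=False) contains it, which the area and covers() checks make visible.

from polygon_geohasher.polygon_geohasher import polygon_to_geohashes, geohashes_to_polygon
from shapely import geometry

polygon = geometry.Polygon([(-39.1795917, 19.432134), (-39.1656847, 19.429034),
                            (-39.1776492, 19.414236), (-39.1795917, 19.432134)])
inner = geohashes_to_polygon(polygon_to_geohashes(polygon, 7))
outer = geohashes_to_polygon(polygon_to_geohashes(polygon, 7, False))

print(inner.area <= polygon.area)  # expected True: inner cells are fully contained
print(outer.area >= polygon.area)  # expected True: outer cells jointly cover the polygon
print(outer.covers(polygon))       # expected True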
def polygon2geohash(polygon, precision=6, coarse_precision=None, inner=True):
    polygon = geometry.shape(polygon.toGeoJSON())
    # fall back to the target precision when no coarse precision is given
    if coarse_precision is None:
        coarse_precision = precision
    geohashes = polygon_to_geohashes(polygon, precision=coarse_precision, inner=inner)
    return sorted(list(geohashes))
def main():
    args = parse_args()

    # load the csv file into pandas for cleanup
    print('Loading...')
    df = pd.read_csv(args.input_file)

    # filter down to area of interest records
    print('Finding AoI...')
    geohashes_aoi = set()
    if args.coverage_file is not None:
        # loading coverage polygon from geo json file
        coverage_geojson = json.load(open(args.coverage_file))
        # generate geohashes covered by the AoI
        geohashes_aoi = helpers.geohashes_from_geojson_poly(
            coverage_geojson, precision=args.geohash_level)
    # filter down to country of interest records
    elif args.country_iso is not None:
        df = df.loc[df["iso3"] == args.country_iso]

    # extract x, y locations and crop of interest
    df = df[(["x", "y"] + args.crop_columns)]
    df = df.reset_index()

    # loop over the x, y which are the cell centroids, and generate a bounding box based on
    # the cell size (taken from the associated geotiff resolution)
    print('Converting points to bounds...')
    centroids = zip(df["x"], df["y"])
    bounds = [
        geometry.box(
            c[0] - CELL_SIZE_X / 2,
            c[1] - CELL_SIZE_Y / 2,
            c[0] + CELL_SIZE_X / 2,
            c[1] + CELL_SIZE_Y / 2,
        ) for c in tqdm(centroids)
    ]

    # loop through the bounds we've created and intersect each with the intended geohash grid
    print('Converting bounds to geohashes...')
    geohashes = [
        polygon_geohasher.polygon_to_geohashes(b,
                                               precision=args.geohash_level,
                                               inner=False) for b in tqdm(bounds)
    ]

    # flatten gh set for each cell preserving index - no clean way to do this in pandas
    flattened_gh = []
    print('Clipping geohashes to AoI...')
    for idx, gh_set in tqdm(enumerate(geohashes)):
        for gh in gh_set:
            if (len(geohashes_aoi) > 0 and gh in geohashes_aoi) or len(geohashes_aoi) == 0:
                bounds_str = helpers.geohash_to_array_str(gh)
                flattened_gh.append((idx, gh, bounds_str))

    # store as a dataframe with any geohashes that were part of 2 cells reduced to 1
    # a better implementation of this would take the value of both cells into account and
    # compute a final adjusted value for the given geohash
    print('Generating output csv...')
    geohash_df = pd.DataFrame(flattened_gh, columns=["cell", "geohash", "bounds"])
    geohash_df = geohash_df.drop_duplicates(subset="geohash", keep="first")
    geohash_df = geohash_df.set_index("cell")
    joined = pd.merge(df, geohash_df, left_index=True, right_index=True)
    joined = joined.drop(columns=["x", "y", "index"])
    joined.to_csv(args.output_file, index=False)