import geohash

def nearby():
    print('geohash.bbox')
    print(geohash.bbox('tdr1w'))
    print('geohash.neighbors')
    print(geohash.neighbors('tdr1wxype953'))
    print('geohash.expand')
    print(geohash.expand('tdr1wxype953'))
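
# A note on the python-geohash calls used throughout these snippets (a
# sketch, assuming the python-geohash package): neighbors() returns only the
# eight surrounding cells, while expand() returns those eight plus the cell
# itself.
cell = 'tdr1w'
assert cell not in geohash.neighbors(cell)  # the surrounding ring only
assert cell in geohash.expand(cell)         # the ring plus the center
assert len(geohash.expand(cell)) == 9       # a 3x3 block away from the poles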
def gh_expansion(seed_gh, exp_iters):
    expansion_ghs = {0: [seed_gh]}
    ghs = []
    for i in range(1, exp_iters + 1):
        expansion_ghs[i] = []
        for gh in expansion_ghs[i - 1]:
            expansion_ghs[i] = expansion_ghs[i] + geohash.expand(gh)
            ghs = ghs + geohash.expand(gh)
    return list(set(ghs))
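
# A quick sanity check of gh_expansion's growth pattern (assuming a seed cell
# away from the poles, where each step adds one ring around the block):
print(len(gh_expansion('tdr1w', 1)))  # 9  -> a 3x3 block around the seed
print(len(gh_expansion('tdr1w', 2)))  # 25 -> a 5x5 block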
def check_line(cls, loc1, loc2, userid):
    """Check if the given line intersects with a camera."""
    # get all cameras in the neighbourhood of both endpoints
    hashes = geohash.expand(geohash.encode(loc1.lat, loc1.lon, 7))
    if str(loc1) != str(loc2):
        hashes.extend(geohash.expand(geohash.encode(loc2.lat, loc2.lon, 7)))
    sets = ["camloc:" + h for h in hashes]
    cams = rd.sunion(sets)
    for camstring in cams:
        cam = Camera(camstring)
        # check_camera_line presumably records hits itself; its return value
        # is not used here
        rate = cam.check_camera_line(loc1, loc2, userid)
    return False
def get_moving_time(geohash1, geohash2):
    try:
        if geohash1 is None or geohash2 is None:
            # just entering or just leaving
            return 0
        if geohash1 == '' or geohash2 == '':
            # unknown: assume one hour
            return 60
        geohash1_parent = geohash1[:5]
        geohash2_parent = geohash2[:5]
        expanded = geohash.expand(geohash1_parent)
        expanded_depth2 = set()
        for gh in expanded:
            neighbors = geohash.neighbors(gh)
            expanded_depth2 = expanded_depth2.union(neighbors)
        expanded_depth2 = list(expanded_depth2)
        if geohash1 == geohash2:
            return 30
        elif geohash2_parent in expanded:
            return 60
        elif geohash2_parent in expanded_depth2:
            return 90
        else:
            return 120
    except Exception as e:
        print_err_detail(e)
        return 60
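
# How get_moving_time's buckets behave (the hashes below are made up for
# illustration; only the 5-character parents matter for the ring tests):
print(get_moving_time(None, 'wydm6'))           # 0  -> just entering/leaving
print(get_moving_time('', 'wydm6'))             # 60 -> unknown, assume an hour
print(get_moving_time('wydm6xyz', 'wydm6xyz'))  # 30 -> identical geohash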
def article_id_by_geo(self, content_type=0, poi_longitude=None,
                      poi_latitude=None, poi_distance=None):
    # `not Article.deleted` would collapse to a plain Python bool;
    # sqla.not_ keeps it a SQL expression
    filters = [sqla.not_(Article.deleted)]
    if content_type:
        filters.append(Article.content_type == content_type)
    if poi_longitude and poi_latitude and poi_distance:
        precision = utils.geo_precision_by_distance(poi_distance)
        poi_hash = geohash.encode(poi_latitude, poi_longitude, precision)
        poi_extend = geohash.expand(poi_hash)
        filters.append(
            sqla.or_(*[
                Article.geo_hash.like(poi_extend_hash + "%")
                for poi_extend_hash in poi_extend
            ]))
    id_with_geo_list = Article.query.with_entities(
        Article.id, Article.latitude, Article.longitude).filter(*filters).all()
    if poi_longitude and poi_latitude and poi_distance:
        temp_list = [
            x for x in id_with_geo_list
            if utils.get_distance_hav(poi_latitude, poi_longitude, x[1], x[2]) <= poi_distance
        ]
        id_list = [id_with_geo[0] for id_with_geo in temp_list]
    else:
        id_list = [id_with_geo[0] for id_with_geo in id_with_geo_list]
    return id_list
def get_nearest_points_dirty(self, center_point, radius, unit='km'):
    """
    Return an approximate list of points from the circle with the given
    center and radius. It uses geohashes and returns with some error
    (see GEO_HASH_ERRORS).
    :param center_point: center of search circle
    :param radius: radius of search circle
    :return: list of GeoPoints from given area
    """
    if unit == 'mi':
        radius = utils.mi_to_km(radius)
    grid_size = GEO_HASH_GRID_SIZE[self.precision]
    if radius > grid_size / 2:
        # radius is too big for the current grid: the 9 neighbors cannot
        # cover all possible points
        suggested_precision = 0
        for precision, max_size in GEO_HASH_GRID_SIZE.items():
            if radius > max_size / 2:
                suggested_precision = precision - 1
                break
        raise ValueError(
            'Too large radius, please rebuild GeoHashGrid with '
            'precision={0}'.format(suggested_precision)
        )
    me_and_neighbors = geohash.expand(self.get_point_hash(center_point))
    return chain(*(self.data.get(key, []) for key in me_and_neighbors))
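
# get_nearest_points_dirty assumes a precision -> cell-size table. A plausible
# sketch (approximate geohash cell widths in km from the usual geohash size
# chart; the values are illustrative, not taken from the original project):
GEO_HASH_GRID_SIZE = {
    1: 5000.0,
    2: 1250.0,
    3: 156.0,
    4: 39.1,
    5: 4.89,
    6: 1.22,
    7: 0.153,
    8: 0.0382,
}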
def _get_named_neighbors(self, gh):
    ghs = {}
    gh_bbox = geohash.bbox(gh)
    for g in geohash.expand(gh):
        if gh == g:
            continue
        b = geohash.bbox(g)
        if gh_bbox['n'] == b['n'] and gh_bbox['w'] == b['e']:
            ghs['L'] = g
        elif gh_bbox['n'] == b['n'] and gh_bbox['e'] == b['w']:
            ghs['R'] = g
        elif gh_bbox['e'] == b['e'] and gh_bbox['n'] == b['s']:
            ghs['U'] = g
        elif gh_bbox['e'] == b['e'] and gh_bbox['s'] == b['n']:
            ghs['D'] = g
        elif gh_bbox['n'] == b['s'] and gh_bbox['w'] == b['e']:
            ghs['LU'] = g
        elif gh_bbox['n'] == b['s'] and gh_bbox['e'] == b['w']:
            ghs['RU'] = g
        elif gh_bbox['s'] == b['n'] and gh_bbox['w'] == b['e']:
            ghs['LD'] = g
        elif gh_bbox['s'] == b['n'] and gh_bbox['e'] == b['w']:
            ghs['RD'] = g
    return ghs
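
# _get_named_neighbors never touches self, so for a quick demo it can be
# called as a plain function (away from the poles all eight labels appear):
labels = _get_named_neighbors(None, 'tdr1w')
print(sorted(labels))  # ['D', 'L', 'LD', 'LU', 'R', 'RD', 'RU', 'U']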
def search_geohash(self, east_longitude, north_latitude, bits=6):
    data = []
    geohash_source = geohash.encode(north_latitude, east_longitude, bits)
    # expand() already includes geohash_source itself
    geohash_value_list = geohash.expand(geohash_source)
    # open the database connection and get a cursor
    db = MySQLdb.connect("localhost", "root", "root", "citydb")
    cursor = db.cursor()
    # run one prefix SELECT per neighboring geohash
    for geohash_value in geohash_value_list:
        sql = "SELECT * FROM CITYLIST WHERE GEOHASH LIKE '{}%'".format(geohash_value)
        try:
            cursor.execute(sql)
            results = cursor.fetchall()
            for result in results:
                data.append(result)
        except MySQLdb.Error:
            print("Error: unable to fetch data")
    # close the database connection
    db.close()
    if len(data) < 3:
        # too few hits: widen the search with a shorter (coarser) geohash
        return self.search_geohash(east_longitude, north_latitude, bits - 1)
    else:
        return data
def expand(self, hashes):
    new = []
    for h in hashes:
        neighbors = geohash.expand(h)
        for n in neighbors:
            if n not in self.fetched:
                new.append(n)
    return new
def _get_hits(self, center_hash):
    hits = []
    for hash_ in geohash.expand(center_hash):
        try:
            hits.extend(self.trie.values_for_prefix(hash_))
        except KeyNotFound:
            pass
    return hits
def get_near_points(self, center_point, radius=2000):
    """A cheap filter that fetches all points in the center grid cell and
    its eight neighbor cells that are within a circle generated from the
    given center point and radius.

    :param center_point: a center point
    :param radius: a radius from a center point. 2000 by default.
    """
    me_and_neighbors = geohash.expand(self.get_point_hash(center_point))
    return chain(*(self.data.get(key, []) for key in me_and_neighbors))
def update_location(user_id, lat, lng):
    user_geohash = geohash.encode(lat, lng, precision=5)
    user_key = "users:%s" % user_id
    last_hash = r.hget(user_key, "user_geohash")
    r.decr("count:%s" % last_hash)
    for key in geohash.expand(user_geohash):
        if r.sismember("geohashes", key):
            r.incr("count:%s" % key)
            r.hset(user_key, "user_geohash", key)
            break
def _get_area_demand(self, df):
    df_area_demand = []
    print("Aggregating demand.")
    for ghash in tqdm(df.geohash6.unique()):
        area_codes = geohash.expand(ghash)
        df_temp = (df[df.geohash6.isin(area_codes)]
                   .groupby('timestamp', as_index=False)
                   .demand.sum()
                   .rename({'demand': 'area_demand'}, axis=1)
                   .assign(geohash6=ghash))
        df_area_demand.append(df_temp)
    return pd.concat(df_area_demand, sort=False)
def main():
    args = parse_args()

    # get country geometry
    country = json.load(open(args.country_geojson))['features'][0]
    polygon = ee.Geometry.Polygon(country['geometry']['coordinates'])
    geohashes_country = polygon2geohash(polygon, precision=5, coarse_precision=5)

    # Get locations of sightings, and restrict to AOI
    df = pd.read_csv(args.hopper_csv)
    df['geohash'] = df[['Y', 'X']].apply(lambda x: geohash.encode(*x, precision=5), axis=1).values
    df = df.loc[df.STARTDATE > args.start_date].loc[df['geohash'].isin(geohashes_country)]
    df['STARTDATE'] = pd.to_datetime(df.STARTDATE)

    # Encode locations as geohashes and get surrounding geohashes
    gh = set(df['geohash'])
    for _ in range(30):
        for g in list(gh):
            gh |= set(geohash.expand(g))
    gh = list(gh.intersection(geohashes_country))
    random.shuffle(gh)
    gh = gh[:len(gh) // 3]
    gh.extend(list(df['geohash']))
    gh = list(set(gh))

    # Prepare to load data
    os.makedirs(args.outdir, exist_ok=True)

    # Get all geohashes of interest for the 30-day windows around dates where
    # a hopper sighting occurs
    interval = 30
    delta = date.fromisoformat('2020-06-01') - date.fromisoformat(args.start_date)
    locs = []
    for i in range(int(delta.days / 30)):
        start_date = date.fromisoformat(args.start_date) + timedelta(days=i * interval)
        end_date = start_date + timedelta(days=interval)
        # use a separate loop variable so the window index `i` is not shadowed
        for j in range(len(gh)):
            locs.append({'date_start': str(start_date), 'date_end': str(end_date), 'geohash': gh[j]})

    # Run jobs in parallel
    jobs = []
    for loc in locs:
        job = delayed(get_one_sentinel)(loc, outdir=args.outdir)
        jobs.append(job)
    random.shuffle(jobs)
    _ = Parallel(backend='multiprocessing', n_jobs=args.n_jobs, verbose=1, batch_size=4)(tqdm(jobs))
def lookup():
    """Calculate the ten closest stations to the input latitude and longitude,
    and return their data as a JSON object."""
    # From Google maps API:
    l = request.values.get("lat", 0, type=float)
    g = request.values.get("lng", 0, type=float)
    session["location"] = {"input": (l, g)}

    # Geohash-encode the input, then determine the expanded neighborhood
    # around the truncated (3-character) geohash
    reference_location = geohash.encode(l, g)
    location_box = geohash.expand(reference_location[:3])
    neighborhoods = []
    for place in location_box:
        geohash_str = place + '%'
        neighbor = dbsession.query(model.Station_Geohash).\
            select_from(model.Station_Geohash).\
            filter(model.Station_Geohash.geohash_loc.ilike(geohash_str)).\
            all()
        neighborhoods = neighborhoods + neighbor

    dist_list = []
    # For all of the stations found in neighborhoods, check for data and snow.
    # If there is data and snow for a given station, add it to the list.
    for location in neighborhoods:
        try:
            station = dbsession.query(model.Station).filter(model.Station.id == location.station_id).one()
            snow = station.snow_data[-1]
            origin = float(l), float(g)
            destination = float(station.latitude), float(station.longitude)
            kms = int(distance(origin, destination))
            mi = int(0.621371 * kms)
            if snow.depth is not None and snow.depth > 0:
                if snow.water_equiv is not None and snow.water_equiv != 0:
                    density = int((snow.water_equiv / snow.depth) * 100)
                    if density > 100:
                        density = 100
                else:
                    density = "No Data"
                dist_list.append({'dist': mi, 'text-code': station.id, 'id': station.given_id,
                                  'ele': station.elevation, 'lat': station.latitude,
                                  'lng': station.longitude, 'name': station.name,
                                  'depth': snow.depth, 'depth_change': snow.depth_change,
                                  'density': density, 'date': snow.date.strftime("%m/%d/%y %H:%M")})
            else:
                continue
        except IndexError:
            continue

    # Return the 10 closest stations, their distances away in miles
    # (converted from kms), and basic telemetry data for each station
    closest_sta = sorted(dist_list, key=lambda k: k['dist'])[0:10]
    time_stamps = [x['date'] for x in closest_sta]
    time_stamp = max(time_stamps)
    response = json.dumps({"closest": closest_sta, "time_stamp": time_stamp})
    return response
def compute_geohash_key(geoh, with_neighbors=True):
    if with_neighbors:
        neighbors = geohash.expand(geoh)
        neighbors = [dbkeys.geohash_key(n) for n in neighbors]
    else:
        neighbors = [geoh]
    key = 'gx|{}'.format(geoh)
    total = DB.sunionstore(key, neighbors)
    if not total:
        # No need to keep it.
        DB.delete(key)
        key = False
    else:
        DB.expire(key, 10)
    return key
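
# Hypothetical call site: when truthy, the returned key names a Redis set
# holding the union of the nine neighbouring geohash buckets, cached for 10s:
key = compute_geohash_key('u09tv')
if key:
    members = DB.smembers(key)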
def get_labels(df, df_label, label_name='hoppers', n_neighbor=0):
    df[label_name] = 0
    for _, row in df_label.iterrows():
        start_day = row.date
        end_day = start_day + timedelta(days=30)
        gh = set([row.gh])
        if n_neighbor > 0:
            for _ in range(n_neighbor):
                for g in list(gh):
                    gh |= set(geohash.expand(g))
        gh = list(gh)
        idx = df[label_name].loc[df['geohash'].isin(gh)].loc[
            df['date'] >= start_day].loc[df['date'] < end_day].index.values
        # .loc with index labels avoids pandas chained-assignment pitfalls
        df.loc[idx, label_name] = 1
    return df
def geohash_and_neighbors(gh, neighbors_depth=1):
    '''
    >>> geohash_and_neighbors('bg4r', neighbors_depth=0)
    {'bg4r'}
    >>> sorted(geohash_and_neighbors('bg4r'))
    ['bg4n', 'bg4p', 'bg4q', 'bg4r', 'bg4w', 'bg4x', 'bg60', 'bg62', 'bg68']
    >>> sorted(geohash_and_neighbors('bg4r', neighbors_depth=2))
    ['bg1v', 'bg1y', 'bg1z', 'bg3b', 'bg3c', 'bg4j', 'bg4m', 'bg4n', 'bg4p', 'bg4q', 'bg4r', 'bg4t', 'bg4v', 'bg4w', 'bg4x', 'bg4y', 'bg4z', 'bg60', 'bg61', 'bg62', 'bg63', 'bg68', 'bg69', 'bg6b', 'bg6c']
    '''
    # some neighbors are calculated more than once, but for performance this
    # makes almost no difference
    ghs = set([gh])
    for i in range(neighbors_depth):
        for gh in tuple(ghs):  # tuple because we iterate over a snapshot while updating ghs
            ghs.update(geohash.expand(gh))
    return ghs
def get_neighbours(cls, lat, lng, radius, tags):
    # get the geohash length for the required radius
    geohash_length = 12
    for to_remove, accuracy in sorted(GEOHASH_CHARS_TO_DISTANCE.items()):
        if accuracy < radius:
            geohash_length = to_remove - 1
            break
    query = []
    param = []
    result = []
    # encode the current lat, lng
    geohash_code = encode(lat, lng, geohash_length)
    # Get the neighbours of the current geohash according to the radius and
    # generate SQL statements for them. Running literal SQL rather than going
    # through the ORM was a conscious decision: this is a performance-critical
    # method, and there is no need to create all these objects just to pass
    # them through the layers of the ORM.
    for prefix in expand(geohash_code):
        query.append("geohash LIKE ?")
        param.append(prefix + "%")
    query_text = "SELECT DISTINCT(shop.id), latitude, longitude FROM shop LEFT JOIN tagging" + \
        " ON shop.id = tagging.shop_id WHERE (" + " OR ".join(query) + ")"
    # include tags in the search
    if tags:
        query_text += " AND tag_id IN (%s) AND tagging.id IS NOT NULL" % ','.join('?' * len(tags))
        param += tags
    # manually filter out the results that fall within the geohash
    # neighbourhood but not within the exact radius
    orig = GeopyPoint(latitude=lat, longitude=lng)
    for row in db.engine.execute(query_text, param):
        shop_as_point = GeopyPoint(latitude=row[1], longitude=row[2])
        if distance(orig, shop_as_point).meters <= radius:
            result.append(row[0])
    return result
def __get_search_region_geohashes(self):
    if self.unit == 'mi':
        self.radius = utils.mi_to_km(self.radius)
    grid_size = GEO_HASH_GRID_SIZE[self.precision]
    if self.radius > grid_size / 2:
        # radius is too big for the current grid: the 9 neighbors cannot
        # cover all possible points
        suggested_precision = 0
        for precision, max_size in GEO_HASH_GRID_SIZE.items():
            if self.radius > max_size / 2:
                suggested_precision = precision - 1
                break
        raise ValueError(
            'Too large radius, please rebuild GeoHashGrid with '
            'precision={0}'.format(suggested_precision))
    search_region_geohashes = geohash.expand(
        self.get_point_hash(self.center_point))
    return search_region_geohashes
def get_surroundings_grid(geo_hash, levels):
    # should at least return one geohash and its surroundings
    if levels == 0:
        levels = 1
    grid_hashes = dict()
    grid_hashes[geo_hash] = True
    count = 1
    # levels is proportional to the radius, giving the size of the patch
    while count <= levels:
        grid_hashes_new = dict()
        for cell in grid_hashes.keys():
            surroundings = geohash.expand(cell)
            grid_hashes_new[cell] = True
            for newcell in surroundings:
                grid_hashes_new[newcell] = True
        grid_hashes = grid_hashes_new
        count = count + 1
    return grid_hashes.keys()
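
# get_surroundings_grid grows the patch one ring per level, like gh_expansion
# above, so `levels` rings cover a (2 * levels + 1) square block:
print(len(list(get_surroundings_grid('tdr1w', 2))))  # 25 -> a 5x5 block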
def geohash_neighbors(geohashstr):
    return {'$in': geohash.expand(geohashstr)}
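
# Hypothetical pymongo usage: match documents whose stored geohash equals any
# of the nine cells around the query hash (`db.places` is made up):
nearby_docs = db.places.find({'geohash': geohash_neighbors('u09tv')})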
def expand(self):
    # avoid shadowing the built-in hash()
    return [Geohash(gh) for gh in geohash.expand(self)]
def get(self):
    motordb = self.settings['motordb']
    print('processing request')

    ### Process Bounds and Center arguments
    view_bounds_str = self.get_argument("bounds")
    view_center_str = self.get_argument("center")
    view_zoom_str = self.get_argument("zoom")
    view_zoom = int(view_zoom_str)
    if not view_bounds_str or not view_center_str:
        self.write([])
        self.finish()
        return
    view_bounds_south, view_bounds_west, view_bounds_north, view_bounds_east = [
        float(x) for x in view_bounds_str.split(',')]
    view_center_lat, view_center_long = [float(x) for x in view_center_str.split(',')]
    view_bounds_width = abs(view_bounds_north - view_bounds_south)
    view_bounds_height = abs(view_bounds_east - view_bounds_west)

    # Do not use a spatial reference system so that calculations are faster;
    # it is not needed here
    view_bounds_ring = osgeo.ogr.Geometry(osgeo.ogr.wkbLinearRing)
    view_bounds_ring.TransformTo(WGS_84)
    view_bounds_ring.AddPoint(view_bounds_west, view_bounds_north)
    view_bounds_ring.AddPoint(view_bounds_east, view_bounds_north)
    view_bounds_ring.AddPoint(view_bounds_east, view_bounds_south)
    view_bounds_ring.AddPoint(view_bounds_west, view_bounds_south)
    view_bounds_ring.AddPoint(view_bounds_west, view_bounds_north)
    view_bounds_geom = osgeo.ogr.Geometry(osgeo.ogr.wkbPolygon)
    view_bounds_geom.TransformTo(WGS_84)
    view_bounds_geom.AddGeometry(view_bounds_ring)
    view_bounds_area = view_bounds_geom.Area()
    view_center_hash = geohash.encode(view_center_lat, view_center_long, precision=32)

    possible_hashes = set('0123456789bcdefghjkmnpqrstuvwxyz')

    # Map the zoom level to a geohash precision. (This formula supersedes a
    # hard-coded per-zoom lookup table in the original code.)
    end_precision = view_zoom // 3
    if end_precision > PRECISION:
        end_precision = PRECISION
    if end_precision < 0:
        end_precision = 0
    if view_zoom < 8:
        end_precision = 0

    # Refine the candidate geohashes one character at a time, keeping only
    # cells that intersect the view bounds or contain the view center
    for precision in range(1, end_precision + 1):
        new_possible_hashes = set()
        for possible_hash in possible_hashes:
            possible_hash_bbox = geohash.bbox(possible_hash)
            possible_hash_ring = osgeo.ogr.Geometry(osgeo.ogr.wkbLinearRing)
            possible_hash_ring.TransformTo(WGS_84)
            possible_hash_ring.AddPoint(possible_hash_bbox['w'], possible_hash_bbox['n'])
            possible_hash_ring.AddPoint(possible_hash_bbox['e'], possible_hash_bbox['n'])
            possible_hash_ring.AddPoint(possible_hash_bbox['e'], possible_hash_bbox['s'])
            possible_hash_ring.AddPoint(possible_hash_bbox['w'], possible_hash_bbox['s'])
            possible_hash_ring.AddPoint(possible_hash_bbox['w'], possible_hash_bbox['n'])
            possible_hash_geom = osgeo.ogr.Geometry(osgeo.ogr.wkbPolygon)
            possible_hash_geom.TransformTo(WGS_84)
            possible_hash_geom.AddGeometry(possible_hash_ring)
            possible_hash_geom_intersection = view_bounds_geom.Intersection(possible_hash_geom)
            possible_hash_geom_intersection.TransformTo(WGS_84)
            possible_hash_area = possible_hash_geom_intersection.Area()
            if possible_hash_area or view_center_hash.startswith(possible_hash):
                for hash_char in '0123456789bcdefghjkmnpqrstuvwxyz':
                    new_possible_hashes.add(possible_hash + hash_char)
        possible_hashes = new_possible_hashes

    # One more pass: keep the visible cells plus their neighbors
    new_possible_hashes = set()
    new_possible_grandparent_hashes = set()
    for possible_hash in possible_hashes:
        possible_hash_bbox = geohash.bbox(possible_hash)
        possible_hash_ring = osgeo.ogr.Geometry(osgeo.ogr.wkbLinearRing)
        possible_hash_ring.TransformTo(WGS_84)
        possible_hash_ring.AddPoint(possible_hash_bbox['w'], possible_hash_bbox['n'])
        possible_hash_ring.AddPoint(possible_hash_bbox['e'], possible_hash_bbox['n'])
        possible_hash_ring.AddPoint(possible_hash_bbox['e'], possible_hash_bbox['s'])
        possible_hash_ring.AddPoint(possible_hash_bbox['w'], possible_hash_bbox['s'])
        possible_hash_ring.AddPoint(possible_hash_bbox['w'], possible_hash_bbox['n'])
        possible_hash_geom = osgeo.ogr.Geometry(osgeo.ogr.wkbPolygon)
        possible_hash_geom.TransformTo(WGS_84)
        possible_hash_geom.AddGeometry(possible_hash_ring)
        possible_hash_geom_intersection = view_bounds_geom.Intersection(possible_hash_geom)
        possible_hash_geom_intersection.TransformTo(WGS_84)
        possible_hash_area = possible_hash_geom_intersection.Area()
        if possible_hash_area or view_center_hash.startswith(possible_hash):
            new_possible_hashes.update(geohash.expand(possible_hash))
            new_possible_grandparent_hashes.add(possible_hash[0:-1])
    possible_hashes = new_possible_hashes

    centroids = []
    for hash in sorted(list(possible_hashes)):
        _lat, _long = geohash.decode(hash)
        centroids.append({
            'hash': hash,
            'arg': True,
            'lat': _lat,
            'long': _long,
        })

    lots = []
    regions = []
    region_set = set()
    query = {'parent': {'$in': list(possible_hashes)}}
    cursor = motordb.lots.find(query)
    print(query)
    self.update_region_cache()
    while (yield cursor.fetch_next):
        lot = cursor.next_object()
        lot['lot'] = True
        lot['bbox'] = geohash.bbox(lot['hash'])
        outline = osgeo.ogr.Geometry(wkb=str(lot['geom']['outline']))
        outline.TransformTo(WGS_84)
        lot['geom']['outline'] = json.loads(osgeo.ogr.ForceToPolygon(outline).ExportToJson())
        geom = outline.ConvexHull()
        if view_bounds_geom.Contains(geom) or view_bounds_geom.Intersects(geom) or view_zoom == 5:
            region_set.add(lot['region']['_id'])
            if view_zoom != 5:
                lots.append(lot)
    for region_oid in region_set:
        region = copy.deepcopy(self.settings['cache']['region']['map']['_id'][region_oid])
        region['region'] = True
        outline = osgeo.ogr.Geometry(wkb=str(region['geom']['outline']))
        outline.TransformTo(WGS_84)
        region['geom']['outline'] = json.loads(osgeo.ogr.ForceToPolygon(outline).ExportToJson())
        regions.append(region)

    self.write(bson.json_util.dumps(sorted(regions, key=lambda x: x['order']) + lots))
    self.finish()
    return
def test_geohash(self):
    self.assertEqual(geohash.encode(47.6097, -122.3331), 'c23nb5pf85m4')
    self.assertEqual(
        geohash.expand('c23'),
        ['c22', 'c26', 'c28', 'c29', 'c2d', 'c20', 'c21', 'c24', 'c23'])
pre_num += 1
if username in user_start_end_dict:
    log("user_start_end_dict[username]", user_start_end_dict[username])
    log("user_start_dict[username]", user_start_dict[username])
    log("user_end_dict[username]", user_end_dict[username])
    fp_user_start_end = fp_growth.generate(user_start_end_dict[username], 1, 2)
    user_start_list = []
    user_end_list = []
    for j, val in enumerate(user_start_dict[username]):
        user_start_list.extend([[item] for item in geohash.expand(val[0])])
    for j, val in enumerate(user_end_dict[username]):
        user_end_list.extend([[item] for item in geohash.expand(val[0])])
    log("user_start_list", user_start_list)
    log("user_end_list", user_end_list)
    fp_user_start = fp_growth.generate(user_start_list, 2, 0)
    fp_user_end = fp_growth.generate(user_end_list, 2, 0)
if ori in start_end_dict:
    log("start_end_dict[ori]", start_end_dict[ori])
    # start_end_list = []
    # for j, val in enumerate(start_end_dict[ori]):
    #     start_end_list.extend([[item] for item in geohash.expand(val[0])])
def SC_population(self, node_gh):
    # uses geohash precision 3 (i.e. a radius of roughly 73 km) and sums the
    # population within this radius
    total_close_pop = sum(
        data['population'] for gh, data in POP_DICT.items()
        if gh[0:3] in geohash.expand(node_gh[0:3]))
    return total_close_pop