Example 1
def generate_RSU_arr(x, y):
    target_area = GLOBAL_VARS.EXTENDED_DOWNTOWN_NASH_POLY
    # The polygons are just coarser boundary cells used to keep the grid count down, since we still rely on geohashing and its resolution is limited.
    polys = gb.divide_grid(target_area, (x, y))
    rsu_arr = []
    for i in range(x):
        for j in range(y):
            idx = j + (i * y)
            p = polys[idx]
            gid = ghh.encode(p.centroid.x, p.centroid.y, precision=6)
            r = ag.adjustable_RSU(gid, p, (i, j))
            r.set_max_size(x, y)
            rsu_arr.append(r)

    if not os.path.exists(os.path.join(os.getcwd(), 'data')):
        raise OSError("Must first download data, see README.md")
    data_dir = os.path.join(os.getcwd(), 'data')

    file_path = os.path.join(data_dir, '{}-{}-grids_df.pkl'.format(x, y))
    with open(file_path, 'rb') as handle:
        df = pd.read_pickle(handle)

    sub_grid_rsus = []
    TMC_THRESH = df['tmc_count'].quantile(0.75)
    DIV_X, DIV_Y = 2, 2
    for i, row in df.iterrows():
        gid = row['grid_id']
        tmc_count = row['tmc_count']
        if tmc_count > TMC_THRESH:
            # BUG: If DIV_X and DIV_Y are odd, the central sub-grid gets the same gid as the parent grid (see the sketch after this example)
            #         if tmc_count / (DIV_X * DIV_Y) > TMC_THRESH:
            #             DIV_X, DIV_Y = 3, 3

            r = geo_utils.get_rsu_by_grid_id(rsu_arr, gid)
            r_poly = r.poly
            sub_polys = divide_grid_temp(r_poly, (DIV_X, DIV_Y))

            for p in sub_polys:
                new_gid = ghh.encode(p.centroid.x, p.centroid.y, precision=6)
                new_r = ag.adjustable_RSU(new_gid, p, (1000, 1000))
                new_r.queue_limit = GLOBAL_VARS.QUEUE_THRESHOLD
                new_r.set_max_size(x, y)
                sub_grid_rsus.append(new_r)
                # print("Adding: {}".format(new_gid))
                r.add_sub_grid(new_r)

    # main_no_sub_polys = [r.poly for r in rsu_arr if not r.get_sub_grids()]
    # sub_grid_polys = [r.poly for r in sub_grid_rsus]

    # SUB_GRIDS_DICT = {}
    # for r in rsu_arr:
    #     if r.get_sub_grids():
    #         SUB_GRIDS_DICT[r.grid_id] = [sg.grid_id for sg in r.get_sub_grids()]

    file_path = os.path.join(data_dir, "{}-{}-rsu_arr.pkl".format(x, y))
    with open(file_path, 'wb') as handle:
        pickle.dump(rsu_arr, handle)

    utils.print_log("Generated rsu_arr for broker.")
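
The BUG noted above can be reproduced in isolation: with an odd split such as 3x3, the centre sub-cell shares its centroid with the parent cell, so both encode to the same 6-character gid. A minimal sketch with made-up coordinates:

# Minimal sketch (not part of the original file); the coordinates are hypothetical.
import geohash_hilbert as ghh
from shapely.geometry import box

parent = box(-86.80, 36.14, -86.76, 36.18)            # hypothetical parent grid cell
w, h = 0.04 / 3, 0.04 / 3                              # 3x3 sub-division
centre_sub = box(-86.80 + w, 36.14 + h, -86.80 + 2 * w, 36.14 + 2 * h)

parent_gid = ghh.encode(parent.centroid.x, parent.centroid.y, precision=6)
centre_gid = ghh.encode(centre_sub.centroid.x, centre_sub.centroid.y, precision=6)
assert parent_gid == centre_gid                        # duplicate gid, as the BUG comment warns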
Example 2
    def __neighbour_bfs(self, poly: Polygon, precision) -> List[str]:
        centroid = poly.centroid
        gh = ghh.encode(*(centroid.coords[0]), precision=precision)
        overlaps = []
        q1 = FifoQueue()
        q1.push(gh)
        q2 = FifoQueue()
        visited = dict()
        discovered = dict()
        level_active = False

        while not q1.empty():
            node = q1.pop()
            if node not in visited.keys():
                visited[node] = True
                discovered[node] = True
                node_poly = shape(ghh.rectangle(node)["geometry"])
                if node_poly.intersects(poly):
                    overlaps.append(node)
                    level_active = True

            next_level = list(ghh.neighbours(node).values())
            for nbr in next_level:
                if nbr not in discovered.keys():
                    discovered[nbr] = True
                    q2.push(nbr)
            if q1.empty() and level_active:
                level_active = False
                q2, q1 = q1, q2

        return overlaps
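
Below is a self-contained sketch of the same level-by-level search, using collections.deque in place of the custom FifoQueue; the default precision of 6 is an assumption. It expands geohash neighbours ring by ring and keeps the cells whose rectangles intersect the query polygon, stopping after the first ring with no intersections.

from collections import deque
from typing import List
import geohash_hilbert as ghh
from shapely.geometry import Polygon, shape

def neighbour_bfs(poly: Polygon, precision: int = 6) -> List[str]:
    # start from the geohash cell containing the polygon's centroid
    start = ghh.encode(*poly.centroid.coords[0], precision=precision)
    overlaps, visited = [], {start}
    current, nxt = deque([start]), deque()
    level_active = True
    while current and level_active:
        level_active = False
        while current:
            node = current.popleft()
            # keep cells whose rectangle overlaps the polygon
            if shape(ghh.rectangle(node)["geometry"]).intersects(poly):
                overlaps.append(node)
                level_active = True
            # queue unvisited neighbours for the next ring
            for nbr in ghh.neighbours(node).values():
                if nbr not in visited:
                    visited.add(nbr)
                    nxt.append(nbr)
        current, nxt = nxt, current
    return overlaps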
Example 3
    def search_all(self, lng, lat):
        '''Reverse geocode a lng/lat coordinate within the features from `self.shapes`.

        Search the features in `self.shapes` for all polygons that contain
        the point (lng, lat). The `properties` of every matching feature are
        yielded (more or less sorted from smallest to largest feature). If no
        feature contains the point, nothing is yielded.

        Parameters:
            lng: float  Longitude (-180, 180) of point. (WGS84)
            lat: float  Latitude (-90, 90) of point. (WGS84)

        Returns:
            Iterator[Dict[Any, Any]]  Iterator over the `properties` of matching features.
        '''
        if not (-180 <= lng <= 180):
            raise ValueError('Longitude must be between -180 and 180.')
        if not (-90 <= lat <= 90):
            raise ValueError('Latitude must be between -90 and 90.')

        key = encode(lng=lng, lat=lat, precision=16, bits_per_char=4)
        for sub_key in [key] + [key[:-i] for i in range(1, len(key) + 1)]:
            # look within geohash rectangles of increasing resolution (see the short illustration after this example)
            for shp in self.shapes.get(sub_key, []):
                # look through all shapes within one resolution
                if in_bbox((lng, lat), shp['bounds']):
                    # first check if point in bbox
                    if p_in_polygon((lng, lat), shp):
                        # ensure point is in polygon
                        yield shp['properties']
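
The prefix loop above probes coarser and coarser geohash cells, from the full 16-character hash down to the empty string (the whole world). A small illustration with an arbitrary coordinate:

from geohash_hilbert import encode

key = encode(lng=-86.78, lat=36.16, precision=16, bits_per_char=4)
prefixes = [key] + [key[:-i] for i in range(1, len(key) + 1)]
# prefixes[0] is the full 16-character hash, prefixes[-1] is '' (covers everything)
print(len(prefixes))  # 17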
Example 4
def test_rectangle(bpc, prec):
    code = encode(rand_lng(), rand_lat(), bits_per_char=bpc, precision=prec)
    lng, lat, lng_err, lat_err = decode_exactly(code, bits_per_char=bpc)

    rect = utils.rectangle(code, bits_per_char=bpc)

    assert isinstance(rect, dict)
    assert rect['type'] == 'Feature'
    assert rect['geometry']['type'] == 'Polygon'
    assert rect['bbox'] == (lng - lng_err, lat - lat_err, lng + lng_err,
                            lat + lat_err)
    assert rect['properties'] == dict(
        code=code,
        lng=lng,
        lat=lat,
        lng_err=lng_err,
        lat_err=lat_err,
        bits_per_char=bpc,
    )

    coords = rect['geometry']['coordinates']
    assert 1 == len(coords)  # one external ring
    assert 5 == len(coords[0])  # rectangle has 5 coordinates

    # ccw
    assert (lng - lng_err, lat - lat_err) == coords[0][0]  # lower left
    assert (lng + lng_err, lat - lat_err) == coords[0][1]  # lower right
    assert (lng + lng_err, lat + lat_err) == coords[0][2]  # upper right
    assert (lng - lng_err, lat + lat_err) == coords[0][3]  # upper left
    assert (lng - lng_err, lat - lat_err) == coords[0][4]  # lower left
Example 5
def test_neighbours(bpc, prec):
    for i_ in range(100):
        code = encode(rand_lng(), rand_lat(), bits_per_char=bpc, precision=prec)
        lng, lat, lng_err, lat_err = decode_exactly(code, bits_per_char=bpc)
        neighbours = utils.neighbours(code, bpc)

        directions = {'north', 'north-east', 'north-west', 'east',
                      'west', 'south', 'south-east', 'south-west'}

        assert directions == set(neighbours.keys())

        # no duplicates (depends on level)
        assert len(neighbours) == len(set(neighbours.values()))

        for k, v in neighbours.items():
            n_lng, n_lat, n_lng_err, n_lat_err = decode_exactly(v, bits_per_char=bpc)

            # same level
            assert len(code) == len(v)
            assert lng_err == n_lng_err
            assert lat_err == n_lat_err

            # neighbour is in disc 4x error
            assert (lng == n_lng or  # east / west
                    lng - 2 * lng_err == n_lng or lng - 2 * lng_err + 360 == n_lng or  # south
                    lng + 2 * lng_err == n_lng or lng + 2 * lng_err - 360 == n_lng)  # north

            assert (lat == n_lat or  # north / south
                    lat - 2 * lat_err == n_lat or lat - 2 * lat_err + 180 == n_lat or  # west
                    lat + 2 * lat_err == n_lat or lat + 2 * lat_err - 180 == n_lat)  # east
Example 6
 def __smallest_container(self, poly: Polygon) -> Union[str, None]:
     centroid_coords = poly.centroid.coords[0]
     gh_centroid = self.__gh_encode(*centroid_coords)
     i = 0
     while not self.__gh_contains_poly(gh_centroid, poly):
         i += 1
         if i == self.gh_len:
             return None
         gh_centroid = ghh.encode(*centroid_coords,
                                  precision=self.gh_len - i)
     return gh_centroid
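
`__gh_contains_poly` is defined elsewhere in the class; a plausible, purely illustrative implementation would check whether the geohash cell's rectangle covers the polygon (assuming `shape` from shapely and the `ghh` import):

    def __gh_contains_poly(self, gh: str, poly: Polygon) -> bool:
        # hypothetical sketch: the cell "contains" the polygon iff its rectangle covers it
        cell = shape(ghh.rectangle(gh)["geometry"])
        return cell.contains(poly)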
Example 7
def bbox_hash(bbox):
    """Get geohash from rectangle covering the complete bbox.

    Parameters:
        bbox: Tuple[float, float, float, float]  Bounding box, (minlng, minlat, maxlng, maxlat)

    Returns:
        str: geohash covering the complete bbox.
    """
    minlng, minlat, maxlng, maxlat = bbox

    # ensure values are in range
    minlng = max(-180, minlng)
    maxlng = min(180, maxlng)
    minlat = max(-90, minlat)
    maxlat = min(90, maxlat)

    ll = encode(lng=minlng, lat=minlat, precision=16, bits_per_char=4)
    ur = encode(lng=maxlng, lat=maxlat, precision=16, bits_per_char=4)

    return commonprefix((ll, ur))
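
Example usage (the bounding box is made up): the returned hash is the longest geohash prefix shared by the lower-left and upper-right corners, so the smaller the box, the longer the prefix.

bbox = (-86.85, 36.10, -86.70, 36.20)
print(bbox_hash(bbox))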
Example 8
    def locate(ssid):
        with shelve.open(Geolocator._CACHE, writeback=True) as cache:
            try:
                if Geolocator._NETWORKS not in cache:
                    cache[Geolocator._NETWORKS] = {}

                last_update = cache[Geolocator._NETWORKS][ssid][
                    Geolocator._DATE]
                age = date.today() - last_update
                if age.days > Geolocator._MAX_AGE:
                    raise Geolocator.Outdated()
                # cache valid
            except (Geolocator.Outdated, KeyError):
                # cache miss or outdated, fetch from WiGLE
                last_update = date.today()
                try:
                    # fetch from wigle
                    response = pygle_api.search(ssid=ssid)
                    locations = {}
                    for res in response['results']:
                        #print((res['trilat'],res['trilong']))
                        geohash = ghh.encode(res['trilong'],
                                             res['trilat'],
                                             precision=Geolocator._PRECISION,
                                             bits_per_char=Geolocator._BPC)
                        lon, lat = ghh.decode(
                            geohash, bits_per_char=Geolocator._BPC
                        )  # limit precision of stored coords to match geohash
                        locations[geohash] = lat, lon
                    totalresults = response['totalResults']

                    if totalresults:
                        if ssid not in cache[Geolocator._NETWORKS]:
                            cache[Geolocator._NETWORKS][ssid] = {
                                Geolocator._LOCATIONS: {}
                            }
                        cache[Geolocator._NETWORKS][ssid][
                            Geolocator._DATE] = last_update
                        for geohash, coords in locations.items():
                            cache[Geolocator._NETWORKS][ssid][
                                Geolocator._LOCATIONS][geohash] = coords
                    else:
                        print('No results for SSID: %s' % ssid)
                except KeyError as e:
                    print(_API_HITS_ERROR_STR)
            finally:
                if ssid in cache[Geolocator._NETWORKS]:
                    result = cache[Geolocator._NETWORKS][ssid][
                        Geolocator._LOCATIONS].values()
                else:
                    result = None

        return last_update, result
Example 9
def test_hilbert_curve(bpc, prec):
    hc = utils.hilbert_curve(prec, bpc)
    bits = bpc * prec

    assert isinstance(hc, dict)
    assert hc['type'] == 'Feature'
    assert hc['geometry']['type'] == 'LineString'

    coords = hc['geometry']['coordinates']
    assert 1 << bits == len(coords)

    for i, coord in enumerate(coords):
        code = encode(*coord, precision=prec, bits_per_char=bpc)
        assert i == decode_int(code, bpc)
Example 10
    def get_geoset(
        self,
        lat: float,
        lon: float,
        obj_type: str = "building",
    ) -> Union[None, List[Polygon]]:
        """Given a location in latitude and longitude coordinates, return 
        all polygons in vicinity. 

        Args:
            lat (float): The latitude coordinate of a location in decimal degrees.
            lon (float): The longitude coordinate of a location in decimal degrees.
            obj_type (str): The name used to construct a unique key for a Redis 
                sorted set. This name should describe the category of polygons we are
                indexing in Redis. Defaults to "building". 
                
                TODO: In the future we might want cache other types of polygons or be 
                more specific w.r.t. building type.

        Returns:
            Union[None, bool]: If the query was successful, then True is returned if 
                the location is inside a build. False if the location is not inside a 
                building and None if the query was not successful.
        """
        # Compute a geohash for the given location
        geohash = ghh.encode(
            lon,
            lat,  # NOTE: Make sure first arg is lon, then lat! 
            precision=self.geohash_precision,
            bits_per_char=self.geohash_bits_per_char,
        )
        # Get the bounding box encoded by the geohash
        geohash_data = ghh.rectangle(geohash)
        geohash_bbox = geohash_data["bbox"]
        # Swap elements in bbox. We get elements (lon, lat), but we want (lat, lon)
        geohash_bbox = [
            geohash_bbox[1],
            geohash_bbox[0],
            geohash_bbox[3],
            geohash_bbox[2],
        ]
        # Construct the key of the sorted set in Redis that we want to access
        key = self.get_key(obj_type=obj_type, geohash=geohash)
        polygons = self._get_geoset(
            key=key,
            lat=lat,
            lon=lon,
            bbox=geohash_bbox,
        )
        return polygons
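
A hypothetical call site (the class name and constructor arguments are assumptions, so it is kept as comments):

# index = PolygonIndex(geohash_precision=5, geohash_bits_per_char=4)
# polygons = index.get_geoset(lat=36.16, lon=-86.78, obj_type="building")
# if polygons:
#     print(len(polygons), "building polygons near the query point")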
Example 11
def adapt_ghash(coords):

    if len(coords) == 0:
        center_p = []
        bbox = []
        ghcode = "null"
    else:
        line = LineString(coords)
        cpoint = line.centroid
        bbox = line.bounds
        edgeLen = max(bbox[2] - bbox[0], bbox[3] - bbox[1])
        level = int(math.ceil(log(360 / edgeLen) / log(2) * 2 / 5))
        ghcode = ghh.encode(cpoint.x, cpoint.y, level)
        center_p = [cpoint.x, cpoint.y]
    return center_p, bbox, ghcode
Example 12
def encode(eList):
    """Compute a unique identifier (UUID) for each river."""
    classCode = 160201
    for i in range(len(eList)):
        geom = shapely.geometry.shape(eList[i]['geometry'])
        cpoint = geom.centroid
        bbox = geom.bounds
        edgeLen = max(bbox[2] - bbox[0], bbox[3] - bbox[1])
        level = int(math.ceil(math.log2(360 / edgeLen) * 2 / 5))
        if level <= 0:
            level = 1
        # compute the geohash code
        ghcode = ghh.encode(cpoint.x, cpoint.y, level)
        # unique identifier: UUID = class code + location code + sequence code
        UUID = str(classCode) + ghcode + "00"
        eList[i]['properties']['UUID'] = UUID
    print('Step3: encoding finished')
    return eList
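
Both this and the previous example derive the geohash precision from the bounding-box edge length: with b bits per character and roughly b/2 bits per axis, the longitude cell width is 360 / 2^(b * precision / 2), so precision is about 2 * log2(360 / edgeLen) / b. The 2/5 factor here appears to assume 5 bits per character (geohash_hilbert's default is 6). A small sketch of that computation; the helper name is ours:

import math

def precision_for_edge(edge_len_deg, bits_per_char=5):
    # smallest precision whose per-axis cell width is <= the bbox edge length
    if edge_len_deg <= 0:
        return 1
    prec = math.ceil(math.log2(360 / edge_len_deg) * 2 / bits_per_char)
    return max(int(prec), 1)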
Example 13
 def __gh_encode(self, lon, lat):
     return ghh.encode(lon, lat, precision=self.gh_len)
Example 14
import geohash_hilbert as ghh
print("Encode: ", ghh.encode(48.668983, -4.32915))

print("Decode: ", ghh.decode('oyTsqesqzy'))
#Z7fe2GaIVO

print("Rectangle: ", ghh.rectangle('oyTs'))
Example 15

if __name__ == "__main__":
    filename = sys.argv[1]
    df = dd.read_csv(filename)
    X = np.array(df[[
        'pickup_longitude', 'pickup_latitude', 'dropoff_longitude',
        'dropoff_latitude'
    ]],
                 dtype='float64')
    p_map = {}
    import pdb
    d_map = {}
    valid_count = 0
    for idx, item in enumerate(X):
        if item[0] == 0.0:
            continue
        if item[2] == 0.0:
            continue
        _pcode = ghh.encode(item[0], item[1])
        _dcode = ghh.encode(item[2], item[3])
        p_map = insert_or_increment(p_map, _pcode)
        d_map = insert_or_increment(d_map, _dcode)
        valid_count += 1
    _plist = reduce(
        reduce(reduce(reduce(reduce(reduce(reduce(list(p_map.items()))))))))
    _plist.sort(key=lambda x: x[1], reverse=True)
    _dlist = reduce(list(d_map.items()))
    _dlist.sort(key=lambda x: x[1], reverse=True)
    pdb.set_trace()
    print(_plist)
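
`insert_or_increment` and the custom `reduce` used above are defined elsewhere in the original file; judging from its usage, a plausible (hypothetical) `insert_or_increment` simply tallies points per geohash cell:

def insert_or_increment(counter, code):
    # hypothetical helper: count how many points fall into each geohash cell
    counter[code] = counter.get(code, 0) + 1
    return counter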
Example 16
    def __init__(self, x, y):
        if not os.path.exists(os.path.join(os.getcwd(), 'data')):
            raise OSError("Must first download data, see README.md")
        self.data_dir = os.path.join(os.getcwd(), 'data')

        if not os.path.exists(os.path.join(self.data_dir, 'sub_graphs')):
            os.mkdir(os.path.join(self.data_dir, 'sub_graphs'))
        sub_graphs_dir = os.path.join(self.data_dir, 'sub_graphs')

        if not os.path.exists(os.path.join(self.data_dir,
                                           'historical_speeds')):
            os.mkdir(os.path.join(self.data_dir, 'historical_speeds'))
        self.historical_speeds_dir = os.path.join(self.data_dir,
                                                  'historical_speeds')

        # DICTIONARIES
        if not os.path.exists(os.path.join(self.data_dir, 'avg_speeds')):
            os.mkdir(os.path.join(self.data_dir, 'avg_speeds'))
        self.avg_speeds_dir = os.path.join(self.data_dir, 'avg_speeds')

        # print(sub_graphs_dir)
        self.sub_graph_dict = geo_utils.read_saved_sub_graphs(sub_graphs_dir)
        # print(self.sub_graph_dict)

        target_area = LOCAL_RSU_VARS.EXTENDED_DOWNTOWN_NASH_POLY
        # The polygons are just coarser boundary cells used to keep the grid count down, since we still rely on geohashing and its resolution is limited.
        polys = gb.divide_grid(target_area, (x, y))

        # NOTE: Only needed by some functions, maybe better to move?!
        self.rsu_arr = []
        for i in range(x):
            for j in range(y):
                idx = j + (i * y)
                p = polys[idx]
                gid = ghh.encode(p.centroid.x, p.centroid.y, precision=6)
                r = ag.adjustable_RSU(gid, p, (i, j))
                r.set_max_size(x, y)
                self.rsu_arr.append(r)

        file_path = os.path.join(self.data_dir, '{}-{}-G.pkl'.format(x, y))
        with open(file_path, 'rb') as handle:
            self.whole_graph = pickle.load(handle)
        # print("Whole graph: ", len(self.whole_graph.nodes))
        # Find what are the things needed to perform the route execution

        self._mongodb = None

        # TODO: Fix or remove after use
        if not os.path.exists(os.path.join(self.data_dir, 'quartiles')):
            os.mkdir(os.path.join(self.data_dir, 'quartiles'))
        quartiles_dir = os.path.join(self.data_dir, 'quartiles')

        file_path = os.path.join(quartiles_dir, 'first_quartile.pkl')
        with open(file_path, 'rb') as f:
            self.first_quartile_list = pickle.load(f)

        file_path = os.path.join(quartiles_dir, 'second_quartile.pkl')
        with open(file_path, 'rb') as f:
            self.second_quartile_list = pickle.load(f)

        file_path = os.path.join(quartiles_dir, 'third_quartile.pkl')
        with open(file_path, 'rb') as f:
            self.third_quartile_list = pickle.load(f)

        file_path = os.path.join(quartiles_dir, 'fourth_quartile.pkl')
        with open(file_path, 'rb') as f:
            self.fourth_quartile_list = pickle.load(f)