Example #1
def get_nearest_stops(key, lng, lat, radius=150, units='m', with_dist=False, with_coord=False, with_hash=False, sort=None):
    if not api_redis.exists('locations'):
        set_cache_stops()
    pieces = [key, lng, lat, radius, units]

    # XXX Precision could be derived from the user's radius; defaults to 36 bits (~150 m)
    ranges = geohash.expand_uint64(geohash.encode_uint64(float(lat), float(lng)), precision=36)

    # XXX These options have no effect at this time; 'pieces' is built but never used
    if with_dist:
        pieces.append('WITHDIST')
    if with_coord:
        pieces.append('WITHCOORD')
    if with_hash:
        pieces.append('WITHHASH')
    if sort:
        pieces.append(sort)

    # TODO Some day this can be done with the Geo Redis commands
    pipe = api_redis.pipeline(transaction=False)
    for r in ranges:
        pipe.zrangebyscore(key, r[0], r[1])

    # TODO This will require some intermediate steps if scores are returned
    stops = [stop for stop_list in pipe.execute() for stop in stop_list]
    # TODO Implement filtering by radius and units
    return get_stop_details(stops)
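To make the pattern in this example concrete, here is a minimal, self-contained sketch of the same idea, assuming a local Redis instance, redis-py >= 3.0, and python-geohash; the key name and coordinates are illustrative. Members are indexed with their uint64 geohash as the sorted-set score, and a proximity lookup runs one ZRANGEBYSCORE per range returned by expand_uint64:

import geohash
import redis

r = redis.StrictRedis()  # assumes a local Redis instance

# Index a few points: member -> uint64 geohash as the sorted-set score
points = {"stop:a": (37.7749, -122.4194), "stop:b": (37.7793, -122.4192)}
for member, (lat, lng) in points.items():
    r.zadd("locations:demo", {member: geohash.encode_uint64(lat, lng)})

# Query: expand the query point into (low, high) score ranges at 36-bit precision
center = geohash.encode_uint64(37.7750, -122.4195)
nearby = []
for lo, hi in geohash.expand_uint64(center, precision=36):
    nearby.extend(r.zrangebyscore("locations:demo", lo, hi))
print(nearby)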
Example #2
    def geohash_values(self, lat, lon):
        hsh = geohash.encode(lat, lon) if lat and lon else None

        return {
            "hash": hsh,
            "hash_u64":
            geohash.encode_uint64(lat, lon) if lat and lon else None,
            "hash_bin": hsh[:self.bb_prefix] if hsh else None
        }
Example #3
    def build_filters(self, filters=None):
        if filters is None:
            filters = {}
        if 'range' in filters:
            ran = int(filters.pop('range')[0])
        else:
            ran = 30
        orm_filters = super(BeatResource, self).build_filters(filters)

        # QueryDict values are always lists; take the first item
        lat = orm_filters.pop('lat__exact', (None,))[0]
        lng = orm_filters.pop('lng__exact', (None,))[0]

        if (lat is not None) and (lng is not None):
            gh = geohash.encode_uint64(float(lat), float(lng))
            orm_filters.update({'geohash_exact': gh})

        orm_filters.update({'range': ran})
        return orm_filters
Example #4
    def __init__(self, filename, geohashSize):
        if filename:
            self.dictionary = dict()
            self.geodictionary = defaultdict(list)
            # newline="" is what csv.reader expects for text files on Python 3
            with open(filename, newline="") as csvfile:
                datareader = csv.reader(csvfile)

                next(datareader, None)  # skip the headers
                for row in datareader:
                    id = UUID(row[Column.id])
                    name = row[Column.name]
                    lat = float(row[Column.lat])
                    long = float(row[Column.long])
                    geo_hash64 = geohash.encode_uint64(lat, long)  # uint64 form; unused below
                    geo_hash = geohash.encode(lat, long, 20)

                    shop = Shop(id, lat, long, geo_hash)
                    self.dictionary[id] = shop
                    self.geodictionary[geo_hash[:geohashSize]].append(shop)
Example #5
 def test_one(self):
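     # Each row: (expected uint64 geohash, latitude, longitude) at the south-west
     # corner of one of the 32 top-level 5-bit cells; encode/decode must round-trip exactly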
     dataset = [
         (0x0000000000000000, -90.0, -180.0),
         (0x0800000000000000, -90.0, -135.0),
         (0x1000000000000000, -45.0, -180.0),
         (0x1800000000000000, -45.0, -135.0),
         (0x2000000000000000, -90.0, -90.0),
         (0x2800000000000000, -90.0, -45.0),
         (0x3000000000000000, -45.0, -90.0),
         (0x3800000000000000, -45.0, -45.0),
         (0x4000000000000000, 0.0, -180.0),
         (0x4800000000000000, 0.0, -135.0),
         (0x5000000000000000, 45.0, -180.0),
         (0x5800000000000000, 45.0, -135.0),
         (0x6000000000000000, 0.0, -90.0),
         (0x6800000000000000, 0.0, -45.0),
         (0x7000000000000000, 45.0, -90.0),
         (0x7800000000000000, 45.0, -45.0),
         (0x8000000000000000, -90.0, 0.0),
         (0x8800000000000000, -90.0, 45.0),
         (0x9000000000000000, -45.0, 0.0),
         (0x9800000000000000, -45.0, 45.0),
         (0xA000000000000000, -90.0, 90.0),
         (0xA800000000000000, -90.0, 135.0),
         (0xB000000000000000, -45.0, 90.0),
         (0xB800000000000000, -45.0, 135.0),
         (0xC000000000000000, 0.0, 0.0),
         (0xC800000000000000, 0.0, 45.0),
         (0xD000000000000000, 45.0, 0.0),
         (0xD800000000000000, 45.0, 45.0),
         (0xE000000000000000, 0.0, 90.0),
         (0xE800000000000000, 0.0, 135.0),
         (0xF000000000000000, 45.0, 90.0),
         (0xF800000000000000, 45.0, 135.0)
     ]
     for data in dataset:
         self.assertEqual(data[0], geohash.encode_uint64(data[1], data[2]))
         latlon = geohash.decode_uint64(data[0])
         self.assertEqual(latlon[0], data[1])
         self.assertEqual(latlon[1], data[2])
Example #6
def index_restaurant_data(args):
    datafile = args.data
    host = args.host
    port = args.port

    pipe = redis.StrictRedis(host=host, port=port).pipeline()
    with open(datafile, 'r', encoding='ISO-8859-1') as f:
        count = 0
        for linum, line in enumerate(f):
            try:
                count += 1
                rest = json.loads(line)
                lat, lon = rest['location']
                h = hash_location(lat, lon)
                key = 'geobox:%s:restaurant' % h
                # Member -> uint64 geohash score (redis-py >= 3.0 mapping form)
                pipe.zadd(key, {line.strip(): geohash.encode_uint64(lat, lon)})
                if count % 1000 == 0:
                    pipe.execute()
                    print('%d items have been inserted' % count)
            except Exception:
                print(str(linum) + ' ' + line)
                raise
    pipe.execute()  # flush any commands still queued after the last full batch
Example #8
 def _encode(self, lat, lon):
     return geohash.encode_uint64(lat, lon)
Example #9
def adaptive_geohash_nearby_search(latitude, longitude, lower_cut, upper_cut, select_cols, count_col, lat_col, lon_col,
                                   geoint_col, custom_where_clause, db_conn):
    '''
    Given the latitude, longitude of a point of interest, the lower and upper
    cut in the result pagination, the columns to select, the primary key
    column, the latitude and longitude columns, the geohash column, a
    custom SQL WHERE clause string to add, and a SQLAlchemy database
    connection, returns a list of selected SQLAlchemy rows as requested in
    the range of lower_cut and upper_cut, ordered by distance to the point
    provided, ascending. Returns None if the geohash search failed (the caller
    should fall back to other search methods).

    latitude: latitude of the point to search around.
    longitude: longitude of the point to search around.
    lower_cut: the starting slice index of the results wanted (same rule as
                list slicing).
    upper_cut: the ending slice index of the results wanted (same rule as
                list slicing).
    select_cols: list of SQLAlchemy table column objects to select in the
                result.
    count_col: a SQLAlchemy table column object in the table, used to count
                potential results; the primary key column is recommended,
                but any other unique column would work.
    lat_col: a SQLAlchemy table column object, the latitude column in the
                table.
    lon_col: a SQLAlchemy table column object, the longitude column in the
                table.
    geoint_col: a SQLAlchemy table column object, the geohash column in the
                table.
    custom_where_clause: any custom SQL WHERE conditions to add on top of
                        the nearby query. Must be sanitized, and must lead
                        with a logical connector (e.g. AND) so it joins
                        cleanly with the geohash conditions.
    db_conn: a SQLAlchemy database connection, used to perform the database
            query.
    '''

    # Reject non-numeric coordinates and non-integer slice bounds
    if (not isinstance(latitude, (int, float)) or not isinstance(longitude, (int, float))
            or not isinstance(lower_cut, int) or not isinstance(upper_cut, int)):
        raise InvalidParamException()
    if lat_col not in select_cols:
        select_cols.append(lat_col)
    if lon_col not in select_cols:
        select_cols.append(lon_col)
    max_result_cap = upper_cut
    # Geohash may give inaccurate results for small result counts, so make
    # sure we fetch at least 20 rows; the extras can be discarded later
    max_result_cap = max(max_result_cap, 20)
    coord_geoint = geohash.encode_uint64(latitude, longitude)
    MAX_DB_HITS = 20  # Maximum number of repeated database queries before giving up
    precision = 50  # Starting precision
    previous_precision = 50
    previous_result_size = 0
    previous_where_clause = ''
    stop = False
    fall_back = False
    loop_count = 0
    while not stop and not fall_back:
        where_clause = get_geohash_where_clause(coord_geoint, precision, geoint_col.name)
        if len(where_clause) == 0:
            # Cannot find anything, fallback to other search methods
            stop = True
            fall_back = True
        else:
            if len(custom_where_clause) > 0:
                where_clause += custom_where_clause
            data_count = db_conn.execute(select(columns=[func.count(count_col)],
                                            whereclause=where_clause)).first()
            result_size = data_count[0]
            loop_count += 1
            if result_size < previous_result_size:
                # Precision has dropped too low and the result set is shrinking;
                # stop and reuse the previous WHERE clause
                where_clause = previous_where_clause
                stop = True
            else:
                previous_result_size = result_size
                previous_precision = precision
                previous_where_clause = where_clause
            if result_size >= max_result_cap:
                stop = True
            if result_size < max_result_cap and loop_count >= MAX_DB_HITS:
                # Hit the max database query limit, fall back to other search methods
                stop = True
                fall_back = True
            else:
                percent_returned = float(result_size) / float(max_result_cap)
                # Adaptively decrease the search precision based on the share
                # of rows found so far, while making sure the precision does
                # not drop too low
                if percent_returned == 0 and (1.0 / float(max_result_cap)) < 0.05:
                    if precision <= 4:
                        precision -= 1
                    else:
                        precision -= 8
                        precision = max(4, precision)
                elif percent_returned == 0 and (1.0 / float(max_result_cap)) >= 0.05:
                    if precision <= 4:
                        precision -= 1
                    else:
                        precision -= 7
                        precision = max(4, precision)
                elif percent_returned <= 0.05:
                    if precision <= 4:
                        precision -= 1
                    else:
                        precision -= 6
                        precision = max(4, precision)
                elif percent_returned < 0.25:
                    if precision <= 4:
                        precision -= 1
                    else:
                        precision -= 4
                        precision = max(4, precision)
                elif percent_returned < 0.5:
                    if precision <= 3:
                        precision -= 1
                    else:
                        precision -= 2
                        precision = max(3, precision)
                else:
                    precision -= 1
                precision = max(0, precision)
    if not fall_back:
        # Fetch the results using the last WHERE clause that succeeded
        data = db_conn.execute(select(columns=select_cols,
                                    whereclause=where_clause)).fetchall()
        sorted_list = []
        for row in data:
            sorted_list.append([row, distance(latitude, longitude, row[lat_col], row[lon_col])])
        # Sort the results by distance
        sorted_list = sorted(sorted_list, key=lambda sort_elem: sort_elem[1])
        return [i[0] for i in sorted_list][lower_cut:upper_cut]
    else:
        # Geohash search failed, signal the failure
        return None
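For orientation, a hypothetical call might look as follows. The shops table, its columns, and the connection URL are assumptions for this sketch, not part of the original project; the select(columns=..., whereclause=...) form used inside the function also implies an older (pre-1.4) SQLAlchemy API:

from sqlalchemy import (MetaData, Table, Column, Integer, String, Float,
                        BigInteger, create_engine)

metadata = MetaData()
# Assumed schema: geoint holds geohash.encode_uint64(lat, lon) for each row
shops = Table(
    "shops", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String),
    Column("lat", Float),
    Column("lon", Float),
    Column("geoint", BigInteger),
)

engine = create_engine("postgresql://localhost/demo")  # assumed connection URL
conn = engine.connect()

rows = adaptive_geohash_nearby_search(
    40.7128, -74.0060,                   # point of interest
    0, 10,                               # first page of ten results
    [shops.c.id, shops.c.name],          # columns to select
    shops.c.id,                          # column used for counting
    shops.c.lat, shops.c.lon,
    shops.c.geoint,
    '',                                  # no extra WHERE conditions
    conn,
)
if rows is None:
    pass  # geohash search failed; fall back to another search method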
Example #10
def score(lng, lat, precision=BIT_COUNT):
    # Keep the BIT_COUNT most significant bits of the 64-bit geohash,
    # then zero everything below `precision` so nearby points share a score
    h = encode_uint64(lat, lng) >> (64 - BIT_COUNT)
    return (h >> (BIT_COUNT - precision)) << (BIT_COUNT - precision)
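The two shifts keep only the top precision bits of a BIT_COUNT-bit prefix of the hash, so nearby points collapse to the same bucket score. A small self-contained check; BIT_COUNT = 32 here is an assumption for the sketch (the project defines its own constant):

from geohash import encode_uint64

BIT_COUNT = 32  # assumed value for this sketch

def score(lng, lat, precision=BIT_COUNT):
    # Keep the BIT_COUNT most significant bits of the 64-bit geohash,
    # then zero everything below `precision`
    h = encode_uint64(lat, lng) >> (64 - BIT_COUNT)
    return (h >> (BIT_COUNT - precision)) << (BIT_COUNT - precision)

# Two points a few metres apart share a bucket at coarse precision
assert score(-122.4194, 37.7749, precision=20) == score(-122.4192, 37.7750, precision=20)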
Example #11
def set_cache_stops():
    stops = get_stops_on_routes()
    key = '{0}.{1}.{2}.{3}'
    pipe = api_redis.pipeline(transaction=False)
    for stop in stops:
        stop['lat'] = get_parsed_coordinate(stop[LAT_KEY])
        stop['lng'] = get_parsed_coordinate(stop[LNG_KEY])
        stop_key = key.format(stop['abbreviation'], stop['stopID'],
                              stop['directionID'], stop['sequence'])
        pipe.set(stop_key, yaml.safe_dump(stop))
        # TODO Some day this can be done with the Geo Redis commands
        # Member -> uint64 geohash score (redis-py >= 3.0 mapping form)
        pipe.zadd('locations', {stop_key: geohash.encode_uint64(stop['lat'], stop['lng'])})
    pipe.execute()
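The TODO above can be retired on modern stacks: Redis 3.2+ stores coordinates natively with GEOADD, and Redis 6.2+ queries them with GEOSEARCH, replacing the manual uint64 scoring and range arithmetic. A sketch, assuming redis-py 4.x, a Redis 6.2+ server, and an illustrative key and member name:

import redis

r = redis.Redis()  # assumes a local Redis 6.2+ instance

# GEOADD takes a flat (longitude, latitude, member) sequence
r.geoadd("locations:geo", (-122.4194, 37.7749, "ROUTE.1001.0.1"))

# GEOSEARCH replaces the expand_uint64 + ZRANGEBYSCORE combination
nearby = r.geosearch("locations:geo",
                     longitude=-122.4195, latitude=37.7750,
                     radius=150, unit="m")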
Example #12
 def save(self, *args, **kwargs):
     if not self.geohash:
         self.geohash = geo_hash.encode_uint64(self.lat, self.lng)
     super(Place, self).save(*args, **kwargs)
Example #13
    for sensor_type in [("temperature", (-10, 40)), ("humidity", (30, 90))]:
        if random.random() < 0.5:
            city_num = random.randint(0, len(city) - 1)
            (device_name, latitude, longitude) = city[city_num]
        else:
            device_name = "Random"
            latitude = random.uniform(-80, 80)
            longitude = random.uniform(-180, 180)
        (sensor_name, (min_value, max_value)) = sensor_type
        message_data = {}
        if device_name == location:
            if sensor_name == "temperature":
                message_data["sensor_value"] = sense.get_temperature()
            else:
                message_data["sensor_value"] = sense.get_humidity()
            sense.show_message(location)
        else:
            message_data["sensor_value"] = min_value + (
                max_value - min_value) * random.random()

        message_data["latitude"] = latitude
        message_data["longitude"] = longitude
        message_data["geohash"] = geohash.encode(latitude, longitude)
        message_data["geohash_uint64"] = geohash.encode_uint64(
            latitude, longitude)
        message_json = json.dumps(message_data)
        publish_topic = "myapp/" + device_name + "/sensor/" + sensor_name
        logger.debug(publish_topic + " -> " + message_json)
        myAWSIoTMQTTClient.publish(publish_topic, message_json, 1)
    time.sleep(0.2)