Example #1
def build_geohash():
    for lat in range(int(CHENGDU_LAT_LON["lat_min"] * 1000000),
                     int(CHENGDU_LAT_LON["lat_max"] * 1000000), 50000):
        for lon in range(int(CHENGDU_LAT_LON["lon_min"] * 1000000),
                         int(CHENGDU_LAT_LON["lon_max"] * 1000000), 50000):
            yield geohash.encode(lat / 1000000,
                                 lon / 1000000), lat / 1000000, lon / 1000000
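A small consumption sketch for the generator above, assuming the python-geohash package and a hypothetical CHENGDU_LAT_LON bounding box (the original defines it elsewhere); the 50000 step works out to a 0.05-degree grid.
import geohash  # python-geohash, assumed

CHENGDU_LAT_LON = {"lat_min": 30.4, "lat_max": 30.9,    # hypothetical values for illustration
                   "lon_min": 103.8, "lon_max": 104.3}

# build_geohash() (above) yields (geohash_string, lat, lon) for every 0.05-degree grid point.
for gh, lat, lon in build_geohash():
    print(gh, lat, lon)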
Example #2
def geoEncoding(data, precision):
    temp = data[['start_lat', 'start_lon']]
    geohashList = []
    for i in temp.values:
        geohashList.append(geohash.encode(i[0], i[1], precision))
    data['geohash{}'.format(precision)] = geohashList
    return data
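A usage sketch, assuming pandas and python-geohash are installed; the loop above could equally be written as a list comprehension.
import pandas as pd
import geohash  # python-geohash, assumed

df = pd.DataFrame({'start_lat': [30.66, 30.70], 'start_lon': [104.06, 104.10]})
df = geoEncoding(df, 6)              # adds a 'geohash6' column alongside the originals
print(df['geohash6'].tolist())       # two 6-character cell identifiers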
Example #3
def ip_to_genhash(ip):
    with geoip2.database.Reader(PATH + '/GeoIP2-City.mmdb') as reader:
        print(ip)
        response = reader.city(ip)
        latitude = response.location.latitude
        longitude = response.location.longitude
        result = geohash.encode(latitude, longitude)
        print(result)
    return result
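A defensive variant worth noting: geoip2 raises geoip2.errors.AddressNotFoundError for IPs missing from the database, and some records carry no coordinates. A guarded sketch, reusing the same assumed PATH, might look like this.
import geoip2.database
import geoip2.errors
import geohash  # python-geohash, assumed

def ip_to_geohash_safe(ip, db_path=PATH + '/GeoIP2-City.mmdb'):  # PATH assumed, as above
    with geoip2.database.Reader(db_path) as reader:
        try:
            loc = reader.city(ip).location
        except geoip2.errors.AddressNotFoundError:
            return None                                   # IP not present in the database
        if loc.latitude is None or loc.longitude is None:
            return None                                   # record has no usable coordinates
        return geohash.encode(loc.latitude, loc.longitude)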
Example #4
def closestCity(lat, lon):
    if len(majorCityMap) == 0:
        loadMajorCityMap()
    gh = geohash.encode(lat, lon, 3)
    if gh in majorCityMap:
        (hash, name, country_code, admin2_code, admin1_code, latitude, longitude, population) = majorCityMap[gh]
    else:
        (name, country_code, admin2_code, admin1_code, latitude, longitude) = ("?","?","?","?","?","?")
    return name, country_code, admin2_code, admin1_code, latitude, longitude
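The lookup works because a precision-3 geohash cell is roughly 156 km on a side, so a query point and the major city serving it usually land in the same 3-character cell. A quick check (python-geohash assumed, coordinates approximate):
import geohash

print(geohash.encode(48.8566, 2.3522, 3))   # central Paris
print(geohash.encode(48.9362, 2.3574, 3))   # Saint-Denis, ~9 km away -> same 3-character hash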
Example #5
 def create(self, validated_data):
     latitude = validated_data.get("latitude")
     if latitude is None or latitude == '':
         raise serializers.ValidationError('未提供纬度')  # "Latitude not provided"
     longitude = validated_data.get("longitude")
     if longitude is None or longitude == '':
         raise serializers.ValidationError('未提供经度')  # "Longitude not provided"
     code = geohash.encode(float(latitude), float(longitude))
     validated_data['geohash'] = code
     addr = Address.objects.create(**validated_data)
     return addr
Example #6
 def update(self, instance, validated_data):
     instance.name = validated_data.get("name")
     instance.content = validated_data.get("content")
     instance.country = validated_data.get("country")
     instance.province = validated_data.get("province")
     instance.city = validated_data.get("city")
     instance.city_zone = validated_data.get("city_zone")
     latitude = validated_data.get("latitude")
     if latitude is None or latitude == '':
         raise serializers.ValidationError('未提供纬度')  # "Latitude not provided"
     instance.latitude = latitude
     longitude = validated_data.get("longitude")
     if longitude is None or longitude == '':
         raise serializers.ValidationError('未提供经度')  # "Longitude not provided"
     instance.longitude = longitude
     instance.geohash = geohash.encode(float(latitude), float(longitude))
     instance.save()
     return instance
Example #7
    def reverse(self, latitude, longitude):
        addr = None

        # try to get the address from the local cache, if we're using it
        if self._using_cache():
            ghash = geohash.encode(float(latitude), float(longitude), self._reverse_cache_geohash_length)
            addr = self._reverse_cache.get(ghash)

        # if we didn't get the address from the cache, or we're not using the cache
        # then get it from Google
        if not addr:
            self._reverse_limiter.wait()
            loc = self._geolocator.reverse((latitude, longitude), exactly_one=True)
            addr = loc.address

        # if we're using the cache, save the value we just got back
        if addr and self._using_cache():
            self._reverse_cache[ghash] = addr

        return addr
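The cache key is a truncated geohash, so any two points inside the same cell reuse one geocoder result; at a geohash length of 7 a cell is on the order of 150 m. A standalone sketch of that bucketing (python-geohash assumed, values illustrative):
import geohash

reverse_cache = {}
key_a = geohash.encode(51.50135, -0.14189, 7)
key_b = geohash.encode(51.50150, -0.14200, 7)       # a few tens of metres away

reverse_cache[key_a] = "Buckingham Palace, London"  # pretend this came from the geocoder
print(key_a == key_b)                               # True: same cell
print(reverse_cache.get(key_b))                     # cache hit, no second geocoding call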
Example #8
 def on_status(self, status):
     if status.coordinates:
         lon, lat = status.coordinates['coordinates']
         author = status.author
         hashtags = self.detect_hashtags(status.text)
         payload = {
             'author': {
                 'avatar_url': author.profile_image_url,
                 'name': author.name,
                 'handle': author.screen_name,
                 'profile_text_color': author.profile_text_color,
                 'profile_background_color': author.profile_background_color
                 },
             'geo': geohash.encode(lat, lon, self.PRECISION),
             'text': status.text,
             'num_retweets': status.retweet_count,
             'source_url': status.source_url,
             'id': status.id_str,
             'date': status.timestamp_ms,
             'hashtags': hashtags
         }
         self.es.index(index='tweets', doc_type='tweets', body=payload)
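Only the geohash string is indexed in the 'geo' field; it can be decoded back to an approximate point with geohash.decode. A round-trip sketch, assuming python-geohash and a PRECISION of 6 (the class constant is not shown above):
import geohash

gh = geohash.encode(40.7128, -74.0060, 6)   # PRECISION assumed to be 6 here
lat, lon = geohash.decode(gh)               # approximate cell centre
print(gh, lat, lon)                         # a precision-6 cell is ~1.2 km x 0.6 km at the equator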
Example #10
def trajectory_set_grid(traj_set, precision, time=False):
    extremums = np.array(list(map(get_extremum, traj_set)))  # list() so this also works on Python 3
    p_bottom_left = [min(extremums[:, 0]), min(extremums[:, 1])]
    p_top_right = [max(extremums[:, 2]), max(extremums[:, 3])]
    p_ble = geoh.encode(p_bottom_left[1], p_bottom_left[0], precision)
    p_tre = geoh.encode(p_top_right[1], p_top_right[0], precision)
    lat_ble, lon_ble, dlat, dlon = geoh.decode_exactly(p_ble)
    lat_tre, lon_tre, dlat, dlon = geoh.decode_exactly(p_tre)
    lats_all = np.arange(lat_ble - dlat, lat_tre + (3 * dlat), dlat * 2)
    lons_all = np.arange(lon_ble - dlon, lon_tre + 3 * dlon, dlon * 2)
    lats_center_all = np.arange(lat_ble, lat_tre + 2 * dlat, dlat * 2)
    lons_center_all = np.arange(lon_ble, lon_tre + 2 * dlon, dlon * 2)

    cells_traj = []
    for traj in traj_set:
        p_start = traj[0]
        cell_start_x = np.where(lons_all < p_start[0])[0][-1]
        cell_start_y = np.where(lats_all < p_start[1])[0][-1]
        cell_start = [cell_start_x, cell_start_y]

        cells = []

        for id_seg in range(len(traj) - 1):
            start = traj[id_seg]
            end = traj[id_seg + 1]
            if time:
                cell_start_time = start[2]
            if abs(start[0] - end[0]) / dlon > abs(start[1] - end[1]) / dlat:
                cell, cells_coord = linecell_lons_bigger_step(start, end, cell_start[:2], lons_all, lats_all,
                                                              lons_center_all,
                                                              lats_center_all)
            else:
                cell, cells_coord = linecell_lats_bigger_step(start, end, cell_start[:2], lons_all, lats_all,
                                                              lons_center_all,
                                                              lats_center_all)
            if time:
                if not cells:
                    cell_time = [cell[0] + [True, [cell_start_time]]]
                else:
                    if cell[0] == cells[-1][:2]:
                        cells[-1][3].append(cell_start_time)
                        cell_time = []
                    else:
                        cell_time = [cell[0] + [True, [cell_start_time]]]
                cell_time = cell_time + [x + [False, -1] for x in cell[1:-1]]
            else:
                if not cells:
                    cell_time = [cell[0] + [True]]
                else:
                    if cell[0] == cells[-1][:2]:
                        cell_time = []
                    else:
                        cell_time = [cell[0] + [True]]
                cell_time = cell_time + [x + [False] for x in cell[1:-1]]

            cells.extend(cell_time)
            cell_start = cell[-1]
        if time:
            cell_end_time = end[2]
            if cell_start == cells[-1][:2]:
                cells[-1][3].append(cell_end_time)
            else:
                cells.append(cell_start + [True, [cell_end_time]])
        else:
            if cell_start != cells[-1][:2]:
                cells.append(cell_start + [True])
        cells_traj.append(cells)
    # cells_traj_=map(np.array,cells_traj)
    return cells_traj, lons_all, lats_all, lons_center_all, lats_center_all
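The grid step comes from geoh.decode_exactly, which returns the cell centre plus the half-height and half-width (dlat, dlon), so one full cell spans 2*dlat by 2*dlon. A minimal sketch of those values (python-geohash assumed):
import geohash as geoh

gh = geoh.encode(30.66, 104.06, 5)
lat, lon, dlat, dlon = geoh.decode_exactly(gh)
print(lat, lon)             # cell centre
print(2 * dlat, 2 * dlon)   # full cell height and width in degrees (the grid step above)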
Example #11
def get_tweet_feed():
    """
    Provides location-based tweets for the given latitude/longitude/precision
    parameters; if the geographic parameters are missing or invalid, an empty
    result is returned.
    """
    hits = []
    search_body = None
    # Process the parameters
    geo_params = parse_request_args(request.args)
    # Enforce mandatory geographic information
    if None not in geo_params:
        lat, lon, precision_raw = map(float, geo_params)
        precision = get_precision_from_raw(precision_raw)
        geo = geohash.encode(lat, lon, precision)

        # The sorting - root.sort
        sorting_terms_array = [{
            '_geo_distance': {
                'geo': {
                    'lat': lat,
                    'lon': lon
                },
                'order': 'asc',
                'unit': 'km',
                'distance_type': 'plane'
            }
        }]

        # The list of should matches - root.query.filtered.query.bool.should
        # The array of OR'd hashtags to support multi-tag search
        tag_should_array = []

        # Add in any hashtags
        hashtags_to_search, local_search_only = get_hashtags_args(request.args)
        if len(hashtags_to_search) > 0:
            # Append every hashtag into the should array
            tag_should_array += [{
                'match': {
                    'hashtags': tag
                }
            } for tag in hashtags_to_search]

        # The query - root.query.filtered.query
        # Used for querying hashtags.
        body_query = {'bool': {'should': tag_should_array}}

        # The filter - root.query.filtered.filter
        # Filtering using geohash_cell
        body_filter = {
            'geohash_cell': {
                'geo': {
                    'lat': lat,
                    'lon': lon
                },
                'precision': precision,
                'neighbors': True
            }
        }

        # Clear the body filter if we should only be parsing local searches
        restrict_search = (len(hashtags_to_search) > 0 and local_search_only) \
            or len(hashtags_to_search) == 0

        if not restrict_search:
            body_filter = {}

        search_body = {
            'sort': sorting_terms_array,
            'query': {
                'filtered': {
                    'filter': body_filter,
                    'query': body_query
                }
            }
        }

        # This really should be paginated using from_
        result = g.es.search(index='tweets', size=250, body=search_body)
        hits = [build_hit(hit) for hit in result['hits']['hits']]

        return make_response({
            '_count': len(hits),
            '_took': result['took'],
            'hits': hits
        })

    # if we have bad geo params, give back nothing
    return make_response({'hits': [], '_took': 0})
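get_precision_from_raw is not shown in these examples; a purely hypothetical stand-in that clamps a client-supplied value to a valid geohash precision could look like this (an assumption, not the project's actual helper).
def get_precision_from_raw(precision_raw, min_precision=1, max_precision=8):
    # Hypothetical: clamp the raw client value to a usable geohash precision.
    return max(min_precision, min(max_precision, int(precision_raw)))

print(get_precision_from_raw(5.7))   # 5
print(get_precision_from_raw(42))    # 8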
Example #13
def get_geohash(latitude, longitude):
    return geohash.encode(latitude=latitude, longitude=longitude)
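A quick call sketch; with the python-geohash package (assumed here), omitting the precision argument typically yields the full 12-character hash.
import geohash

print(get_geohash(39.9042, 116.4074))        # default precision: a 12-character geohash
print(geohash.encode(39.9042, 116.4074, 5))  # coarser 5-character cell for comparison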
Example #14
def generateCityMapping(level):
    ghashes = {}
    f = codecs.open("cities1000.txt", "r", "utf-8")
    h = codecs.open("largestCityPerHash" + str(level) + ".tab", "w", "utf-8")
    for line in f:
        line = line.rstrip()
        A = line.split(u'\t')
        if len(A) == 19:
            (geonameid, name, asciiname, alternatenames,
             latitude, longitude, feature_class, feature_code,
             country_code, cc2, admin1_code, admin2_code,
             admin3_code, admin4_code, population, elevation,
             dem, timezone, modification_date) = A
            if feature_class == 'P':
                gh = geohash.encode(float(latitude), float(longitude), level)
                if gh not in ghashes:
                    ghashes[gh] = []
                ghashes[gh].append((int(population), name, country_code, admin2_code,
                                    admin1_code, latitude, longitude))  # int(): Python 3 has no long()

    # print >> sys.stderr, "cities1000.txt processed"
    
    for gh in ghashes:
        if len(ghashes[gh]) > 1:
            ghashes[gh].sort(reverse=True)
            ghashes[gh] = [ ghashes[gh][0] ]
    f.close()
    
    # print >> sys.stderr, "ghashes CAR(sort())"
    
    # Now use airport list to see if we can fill any blanks:
    g = codecs.open("airport-codes.csv", "r", "utf-8")
    # print >> sys.stderr, "airport-codes.csv opened"
    for line in g:
        line = line.rstrip()
        A = line.split(u',')
        # print >> sys.stderr, str(len(A))
        if len(A) == 13:
            if A[0] == u"ident":
                continue
            (ident,type,name,latitude_deg,longitude_deg,elevation_ft,continent,iso_country,iso_region,municipality,gps_code,iata_code,local_code) = A
            # print >> sys.stderr, iata_code
            gh = geohash.encode(float(latitude_deg), float(longitude_deg), level)
            if gh not in ghashes:
                print("Found airport for location:" + municipality + ", " + iso_country, file=sys.stderr)
                if len(municipality) == 0:
                    municipality = name
                ghashes[gh] = [ (0, municipality,  iso_country, name, iata_code, latitude_deg, longitude_deg) ]
    
    g.close()
    
    for gh in ghashes:
        top = ghashes[gh][0]
        print(u"\t".join([gh, top[1], top[2], top[3], top[4],
                          str(top[5]), str(top[6]), str(top[0])]), file=h)
    
    h.close()
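Each line written above has the same eight tab-separated fields that Example #4's closestCity unpacks, so a matching loader, sketched here hypothetically (loadMajorCityMap itself is not shown in these examples), could be:
import codecs

majorCityMap = {}

def loadMajorCityMap(level=3):
    # Hypothetical reader for the largestCityPerHash<level>.tab file produced above.
    with codecs.open("largestCityPerHash" + str(level) + ".tab", "r", "utf-8") as f:
        for line in f:
            fields = line.rstrip("\n").split("\t")
            if len(fields) == 8:
                gh = fields[0]
                majorCityMap[gh] = tuple(fields)   # (hash, name, country, admin2, admin1, lat, lon, population)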