Example #1
    def mapper(self, _, line):
        # Defaults, so the final yield is safe even if no row is parsed
        final_geohash = None
        trip_seconds = 0
        for row in csv.reader([line]):
            pickup_lat = row[17]
            pickup_lon = row[18]
            dropoff_lat = row[20]
            dropoff_lon = row[21]
            trip_seconds = row[4]

            # Skip the header row
            if trip_seconds == 'Trip Seconds':
                final_geohash = None
                trip_seconds = 0
                continue

            # Skip rows with empty cells
            if not pickup_lat or not pickup_lon \
              or not dropoff_lat or not dropoff_lon \
              or not trip_seconds:
                final_geohash = None
                trip_seconds = 0
                continue

            time_of_day = process_time(row)
            if not time_of_day:
                final_geohash = None
                trip_seconds = 0

            else:
                pickup_geohash = geohash2.encode(float(pickup_lat),
                                                 float(pickup_lon), 7)
                dropoff_geohash = geohash2.encode(float(dropoff_lat),
                                                  float(dropoff_lon), 7)
                final_geohash = pickup_geohash + "_" + dropoff_geohash + "_" + time_of_day

        yield final_geohash, (trip_seconds, 1)
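These mapper methods come from an mrjob-style job; the matching reducer is not shown. A minimal companion sketch (an assumption, not the original project's code) that averages trip time per pickup/dropoff/time-of-day key from the (trip_seconds, 1) pairs yielded above:

    def reducer(self, final_geohash, values):
        # Drop the records the mapper flagged as invalid (key is None)
        if final_geohash is None:
            return
        total_seconds = 0.0
        total_trips = 0
        for seconds, count in values:
            total_seconds += float(seconds)
            total_trips += count
        # Average trip duration for this pickup_dropoff_timeofday bucket
        yield final_geohash, total_seconds / total_trips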
Example #2
def process():

    update_list = []
    with open('op-go.csv') as csv_file:
        reader = csv.DictReader(csv_file, fieldnames=fields)
        line_count = 0

        for row in reader:
            if line_count == 0:
                row.pop("geohash")
                row.update({'lat': 'lat'})
                row.update({'lon': 'lon'})
                line_count += 1
                update_list.append(row)
                continue
            else:
                hashed = row["geohash"]
                lat = Geohash.decode(hashed)[0]
                lon = Geohash.decode(hashed)[1]
                row.pop("geohash")
                row.update({'lat': lat})
                row.update({'lon': lon})

            update_list.append(row)

    with open('converted.csv', 'w') as myfile:
        wr = csv.writer(myfile)
        for row in update_list:
            wr.writerow(dict(row).values())
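The writer above emits dict values positionally, which relies on insertion order. A DictWriter variant of the same conversion (a sketch, assuming 'fields' is the same input header list used above and Python 3.7+):

import csv
import Geohash

def process_dictwriter(fields):
    # Replace the geohash column with explicit lat/lon columns
    out_fields = [f for f in fields if f != 'geohash'] + ['lat', 'lon']
    with open('op-go.csv') as src, open('converted.csv', 'w', newline='') as dst:
        reader = csv.DictReader(src, fieldnames=fields)
        writer = csv.DictWriter(dst, fieldnames=out_fields)
        writer.writeheader()
        next(reader)  # skip the original header row
        for row in reader:
            lat, lon = Geohash.decode(row.pop('geohash'))
            row['lat'] = lat
            row['lon'] = lon
            writer.writerow(row)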
Example #3
    def test_basic(self):

        hash = Geohash.encode(self.family[0], self.family[1], precision=20)

        (lats, lons) = Geohash.decode(hash)

        assert float(lats) == self.family[0]
        assert float(lons) == self.family[1]
Example #4
def get_latlon(result, end=True):
    if end:
        eloc_latlon = result['geohashed_end_loc'].apply(lambda x: geohash.decode_exactly(x))
        result['eloc_lat'] = eloc_latlon.apply(lambda x: float(x[0]))
        result['eloc_lon'] = eloc_latlon.apply(lambda x: float(x[1]))
    sloc_latlon = result['geohashed_start_loc'].apply(lambda x: geohash.decode_exactly(x))
    result['sloc_lat'] = sloc_latlon.apply(lambda x: float(x[0]))
    result['sloc_lon'] = sloc_latlon.apply(lambda x: float(x[1]))
    if end:
        result['eloc_sloc_lat_sub'] = result['eloc_lat'] - result['sloc_lat']
        result['eloc_sloc_lon_sub'] = result['eloc_lon'] - result['sloc_lon']
    return result
Example #5
def decode_geohash(data):
    result = data.copy()
    start_loc = np.array(result['geohashed_start_loc'].apply(
        lambda x: Geohash.decode(x)).tolist())
    result['start_loc_lat'] = start_loc[:, 0]
    result['start_loc_lon'] = start_loc[:, 1]
    if 'geohashed_end_loc' in data.columns:
        end_loc = np.array(result['geohashed_end_loc'].apply(
            lambda x: Geohash.decode(x)).tolist())
        result['end_loc_lat'] = end_loc[:, 0]
        result['end_loc_lon'] = end_loc[:, 1]
    return result
Example #6
def lochasher(lines):
	hashDict = {}
	for line in lines:
		if type(line[1]) != tuple:
			hashed = Geohash.encode(round(float(line[0][0]),6),round(float(line[0][1]),6),7)
			hashDict.setdefault(hashed,[]).append(line)
		else:
			hashed = Geohash.encode(round(float(line[0][1]),6),round(float(line[0][0]),6),7)
			hashedneighbors = geohash.neighbors(hashed)
			hashedneighbors.append(hashed)
			hashDict.setdefault((' '.join(hashedneighbors),line[1]),[]).append(line)
	return hashDict
Example #7
def get_loc_matrix():
    result_path = cache_path + 'loc_matrix.hdf'
    if os.path.exists(result_path):
        result = pd.read_hdf(result_path, 'w')
    else:
        train = pd.read_csv(train_path)
        test = pd.read_csv(test_path)
        end_loc = pd.DataFrame(
            {'geohashed_end_loc': list(train['geohashed_end_loc'].unique())})
        end_loc['end_loc_lat'] = end_loc['geohashed_end_loc'].apply(
            lambda x: Geohash.decode(x)[0])
        end_loc['end_loc_lon'] = end_loc['geohashed_end_loc'].apply(
            lambda x: Geohash.decode(x)[1])
        end_loc['end_loc_lat_box'] = end_loc['end_loc_lat'].apply(
            lambda x: x // 0.003)
        end_loc['end_loc_lon_box'] = end_loc['end_loc_lon'].apply(
            lambda x: x // 0.00375)
        count_of_loc = train.groupby('geohashed_end_loc',
                                     as_index=False)['geohashed_end_loc'].agg(
                                         {'count_of_loc': 'count'})
        end_loc = pd.merge(end_loc,
                           count_of_loc,
                           on='geohashed_end_loc',
                           how='left')
        max_index = end_loc.groupby(
            ['end_loc_lat_box',
             'end_loc_lon_box']).apply(lambda x: x['count_of_loc'].argmax())
        end_loc = end_loc.loc[
            max_index.tolist(),
            ['geohashed_end_loc', 'end_loc_lat', 'end_loc_lon']]
        end_loc.sort_values('end_loc_lat', inplace=True)
        end_loc = end_loc.values
        start_loc = pd.DataFrame({
            'geohashed_start_loc':
            list(pd.concat([train, test])['geohashed_start_loc'].unique())
        })
        start_loc['start_loc_lat'] = start_loc['geohashed_start_loc'].apply(
            lambda x: Geohash.decode(x)[0])
        start_loc['start_loc_lon'] = start_loc['geohashed_start_loc'].apply(
            lambda x: Geohash.decode(x)[1])
        start_loc = start_loc.values
        start_end_loc_arr = []
        for i in start_loc:
            for j in end_loc:
                if (np.abs(i[1] - j[1]) < 0.012) & (np.abs(i[2] - j[2]) <
                                                    0.015):
                    start_end_loc_arr.append([i[0], j[0]])
        result = pd.DataFrame(
            start_end_loc_arr,
            columns=['geohashed_start_loc', 'geohashed_end_loc'])
        result.to_hdf(result_path, 'w', complib='blosc', complevel=5)
    return result
Example #8
def get_latlon(result):
    eloc_latlon = result['geohashed_end_loc'].apply(
        lambda x: Geohash.decode_exactly(x))
    result['eloc_lat'] = eloc_latlon.apply(lambda x: float(x[0]))
    result['eloc_lon'] = eloc_latlon.apply(lambda x: float(x[1]))
    sloc_latlon = result['geohashed_start_loc'].apply(
        lambda x: Geohash.decode_exactly(x))
    result['sloc_lat'] = sloc_latlon.apply(lambda x: float(x[0]))
    result['sloc_lon'] = sloc_latlon.apply(lambda x: float(x[1]))

    result['eloc_sloc_lat_sub'] = result['eloc_lat'] - result['sloc_lat']
    result['eloc_sloc_lon_sub'] = result['eloc_lon'] - result['sloc_lon']
    return result
Example #9
 def handle_event(self, event, sender, level, formatted_msg, data):
     if self.mqttc == None:
         return
     #sender_name = type(sender).__name__
     #if formatted_msg:
     #    message = "[{}] {}".format(event, formatted_msg)
     #else:
     #message = '{}: {}'.format(event, str(data))
     if event == 'catchable_pokemon':
         #self.mqttc.publish("pgomapcatch/all", str(data))
         #print data
         if data['pokemon_id']:
             #self.mqttc.publish("pgomapcatch/all/catchable/"+str(data['pokemon_id']), str(data))
             # precision=4 means ~19545 meters, see http://stackoverflow.com/questions/13836416/geohash-and-max-distance
             geo_hash = Geohash.encode(data['latitude'],
                                       data['longitude'],
                                       precision=4)
             self.mqttc.publish(
                 "pgomapgeo/" + geo_hash + "/" + str(data['pokemon_id']),
                 str(data['latitude']) + "," + str(data['longitude']) +
                 "," + str(data['encounter_id']) + "," +
                 str(data['pokemon_id']) + "," +
                 str(data['expiration_timestamp_ms']) + "," +
                 str(data['pokemon_name']))
             self.mqttc.publish(
                 "pgomapcatch/all/catchable/" + str(data['pokemon_id']),
                 str(data['latitude']) + "," + str(data['longitude']) +
                 "," + str(data['encounter_id']) + "," +
                 str(data['pokemon_id']) + "," +
                 str(data['expiration_timestamp_ms']) + "," +
                 str(data['pokemon_name']))
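The precision-to-distance relationship cited in the comment above can be checked with decode_exactly, which returns the cell half-sizes in degrees (a small sketch using the same Geohash package):

import Geohash

# decode_exactly returns (lat, lon, lat_err, lon_err); the error terms are
# half the cell size. At precision=4 the cell is ~0.176 degrees of latitude,
# i.e. roughly 19.5 km, matching the 19545-meter figure in the comment.
_, _, lat_err, lon_err = Geohash.decode_exactly('wx4g')
print(2 * lat_err * 111.32)  # ~19.57 km of latitude per cell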
Example #10
def prediction_to_geohash2():
    predict = np.load("prediction.npy")

    data = pd.read_csv("test.csv")
    data = list(data["orderid"])

    print(len(data))

    print(predict.shape[0])

    label = []
    num = predict.shape[0]
    #num = 100
    for i in tqdm.trange(num):
        temp = predict[i]
        label.append(gh.encode(temp[0], temp[1], precision=7))
    result = ''
    for i in tqdm.trange(len(label)):
        a = random.randint(a=0, b=len(label) - 1)
        b = random.randint(a=0, b=len(label) - 1)

        # This is just an attempt: since labels are not allowed to repeat,
        # random picks are appended.
        # TODO: try writing the file with pandas instead of building one big string

        result = result + str(
            data[i]) + "," + label[i] + "," + label[a] + "," + label[b] + "\n"

    with open("submission.csv", "w") as f:
        f.write(result)
Example #11
def testhashgeo():
    # Testing hashgeo
    cookies = configparser.get('google', 'cookies')
    headers = configparser.get('google', 'headers')

    url = "https://www.google.com/maps/timeline/kml?authuser=0&pb=!1m8!1m3!1i{0}!2i{1}!3i{2}!2m3!1i{0}!2i{1}!3i{2}".format(year, month, day)

    r = requests.get(url, headers=headers, cookies=cookies)

    o = xmltodict.parse(r.text)

    name = o['kml']['Document']['name']

    keys = []
    values = []

    when = o['kml']['Document']['Placemark']['gx:Track']['when']

    for i in when:
        newdate = (parser.parse(i)).astimezone(tz.tzlocal())
        a = newdate.strftime("%Y-%m-%d %H:%M:%S")
        keys.append(a)

    where = o['kml']['Document']['Placemark']['gx:Track']['gx:coord']
    for i in where:
        lon, lat = i[:-2].split()
        coord = lat + "," + lon
        hashtest = Geohash.encode(float(lat), float(lon))
        print str(coord) + " - " + str(hashtest)
Example #12
def putDataInfoMysql():

    sqlConnection = my.connect(host="amina.uhurulabs.org",
                               user="******",
                               passwd="",
                               db="sensors")
    cursor = sqlConnection.cursor()

    for key in data["stations"]:
        elevation = str(key.get("elevation"))
        name = str(key.get("name"))
        lng = str(key["location"].get("lng"))
        lat = str(key["location"].get("lat"))
        lastMeasurement = str(key.get("lastMeasurement"))
        firstMeasurement = str(key.get("firstMeasurement"))
        timezoneOffset = str(key.get("timezoneOffset"))
        battery = str(key.get("battery"))
        tahmoId = str(key.get("id"))
        deviceId = str(key.get("deviceId"))
        first = convertToUnixTimeStamp(firstMeasurement)
        gHash = Geohash.encode((key["location"].get("lat")),
                               (key["location"].get("lng")),
                               precision=10)
        #print tahmoId, deviceId, name, lng, lat, elevation, battery, firstMeasurement, first, lastMeasurement, gHash
        sqlCommand = "INSERT INTO weatherstations (tahmoId, name, longitude, latitude, elevation, battery, deviceId, firstMeasurement, lastMeasurement, geohash) \
                VALUES ('" + tahmoId + "','" + name + "','" + lng + "','" + lat + "','" + elevation + "','" + battery + "','" + deviceId + "','" + firstMeasurement + "','" + lastMeasurement + "','" + gHash + "')"
        print sqlCommand

        try:
            cursor.execute(sqlCommand)
            sqlConnection.commit()
        except my.Error as e:
            print e
            sqlConnection.rollback()
    sqlConnection.close()
Example #13
def update_restaurant(restaurant):
    print('Updating DynamoDB with restaurant {} in table {}'.format(
        restaurant['restaurant-id'], config.restaurants_dynamodb_table_name))

    geohash = Geohash.encode(
        restaurant['restaurant-location']['lat'],
        restaurant['restaurant-location']['lng'],
        precision=config.restaurants_dynamodb_geohash_precision)

    print('DYNAMODB: Adding restaurant: {} ({})'.format(
        restaurant['restaurant-id'], geohash))

    response = config.restaurants_dynamodb_table.update_item(
        Key={
            config.restaurants_pkey: geohash,
            config.restaurants_skey: restaurant['restaurant-id']
        },
        UpdateExpression="SET #restaurant_name = :restaurant_name",
        ExpressionAttributeNames={'#restaurant_name': 'restaurant-name'},
        ExpressionAttributeValues={
            ':restaurant_name': restaurant['restaurant-name']
        },
        ReturnValues="UPDATED_NEW")

    return response
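For reference, a hypothetical call showing the input shape the function expects (field names come from the snippet above; the values are invented):

update_restaurant({
    'restaurant-id': 'r-0042',
    'restaurant-name': 'Example Diner',
    'restaurant-location': {'lat': 40.7128, 'lng': -74.0060},
})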
Example #14
def data2influxdb(dataset_title, organization_title, dateTime, ip):
    dataset = dataset_title
    organization = organization_title
    geoIPdata = reader.get(ip)
    
    try:
        lat = float(geoIPdata['location']['latitude'])
        lon = float(geoIPdata['location']['longitude'])
        geohash_data = Geohash.encode(float(lat), float(lon))
    except:
        geohash_data = float(0)

    try:
        country = geoIPdata['registered_country']['iso_code']
    except:
        country = 'null'
    try:    
        city = geoIPdata['city']['names']['en']
    except:
        city = 'null'

    access_log = [{
        'measurement' : 'access',
        'tags': { 'dataset': dataset,
                 'organization' : organization,
                 'geohash' : geohash_data,
                 'country' : country,
                 'city' : city
               },
        'fields' : {'value' : 1},
        'time' : dateTime
    }]
    print(access_log)
    client.write_points(access_log)
Example #15
def to_influxdb_point(station_data):
    return {
        'measurement':
        'bikes',
        'tags': {
            'scheme_id': station_data['schemeId'],
            'scheme_name': station_data['schemeShortName'],
            'station_id': station_data['stationId'],
            'station_name': station_data['name'],
            'station_name_irish': station_data['nameIrish']
        },
        'time':
        TZ_CORK.localize(
            dateutil.parser.parse(station_data['dateStatus'],
                                  dayfirst=True)).isoformat(),
        'fields': {
            'bikes_available':
            station_data['bikesAvailable'],
            'docks_available':
            station_data['docksAvailable'],
            'docks_count':
            station_data['docksCount'],
            'station_status':
            station_data['status'],
            'latitude':
            float(station_data['latitude']),
            'longitude':
            float(station_data['longitude']),
            'geohash':
            Geohash.encode(float(station_data['latitude']),
                           float(station_data['longitude']), 12)
        }
    }
Example #16
def get_distance(result):
    result_path = cache_path + 'distance_feat_%d.hdf' % (result.shape[0])
    if os.path.exists(result_path) & flag:
        temp = pd.read_hdf(result_path, 'w')
        result = pd.merge(result, temp, on=['orderid', 'geohashed_end_loc'], how='left')
    else:
        locs = list(set(result['geohashed_start_loc']) | set(result['geohashed_end_loc']))
        if np.nan in locs:
            locs.remove(np.nan)
        deloc = []
        for loc in locs:
            deloc.append(Geohash.decode(loc))
        loc_dict = dict(zip(locs,deloc))
        geohashed_loc = result[['geohashed_start_loc','geohashed_end_loc']].values
        distance = []
        mht_distance = []
        for i in geohashed_loc:
            lat1, lon1 = loc_dict[i[0]]
            lat2, lon2 = loc_dict[i[1]]
            distance.append(cal_distance(lat1,lon1,lat2,lon2))
            mht_distance.append(cal_mht_distance(lat1,lon1,lat2,lon2))
        result['distance'] = distance
        result['mht_distance'] = mht_distance
        result[['orderid','geohashed_end_loc','distance','mht_distance']].to_hdf(result_path, 'w', complib='blosc', complevel=5)
    return result
Example #17
def authorize_spotify():
    # First check we have a location
    location = "New York, NY"  # request.form['location']
    if not location:
        return index()

    payload = {'address': location,
               'key': BaseConfig.GOOGLE_GEOCODING}
    GOOGLE_GEOCODE_URL = "https://maps.googleapis.com/maps/api/geocode/json"
    urlparams = urllib.urlencode(payload)
    r = requests.get("%s?%s" % (GOOGLE_GEOCODE_URL, urlparams))
    if not r.status_code == 200:
        return "Error"
    json_data = json.loads(r.text)
    lat = json_data["results"][0]["geometry"]["location"]["lat"]
    lng = json_data["results"][0]["geometry"]["location"]["lng"]
    session['geohash'] = Geohash.encode(lat, lng)

    # TODO implement state & show_dialog
    # https://github.com/plamere/spotipy/issues/211
    # stateKey = 'spotify_auth_state'
    # state = ''.join(choice(ascii_uppercase) for i in range(16))
    # res.cookie(stateKey, state)
    oauth = oauth2.SpotifyOAuth(
        client_id=BaseConfig.SPOTIFY_CLIENT_ID,
        client_secret=BaseConfig.SPOTIFY_CLIENT_SECRET,
        redirect_uri=BaseConfig.SPOTIFY_REDIRECT_URL,
        scope='user-library-read')

    return redirect(oauth.get_authorize_url())
Example #18
    def test_proximity(self):

        famhash = Geohash.encode(self.family[0], self.family[1], precision=20)
        nexthash = Geohash.encode(self.next_door[0],
                                  self.next_door[1],
                                  precision=20)
        sturthash = Geohash.encode(self.sturt_mall[0],
                                   self.sturt_mall[1],
                                   precision=20)
        rockhash = Geohash.encode(self.the_rock[0],
                                  self.the_rock[1],
                                  precision=20)

        assert famhash[:5] == nexthash[:5]
        assert famhash[:4] == sturthash[:4]
        assert famhash[:2] == rockhash[:2]
Example #19
 def parse_category(self, response):
     json_data = json.loads(response.body)
     cates = self.__parse_category(json_data)
     geo_hash_val = geohash.encode(response.meta['item']['lat'],
                                   response.meta['item']['lng'],
                                   precision=11)
     for cate in cates:
         cate_id = cate['cate1_id']
         if 'cate2_id' in cate:
             cate_id = cate['cate2_id']
         url = "https://mainsite-restapi.ele.me/shopping/restaurants?extras%5B%5D=activities"
         url += "&geohash=%s&latitude=%s&limit=24&longitude=%s" % (
             geo_hash_val, response.meta['item']['lat'],
             response.meta['item']['lng'])
         url += "&restaurant_category_ids%%5B%%5D=%s&sign=%s&offset=0" % (
             cate_id, int(time.time() * 1000))
         request_context = {
             'url': url,
             'grab_type': GRAB_TYPE_SHOPLIST,
             'cate1': cate['cate1_name'],
             'city': response.meta['item']['city'],
             'spot_id': response.meta['item']['spot_id'],
             'spot_name': response.meta['item']['spot_name'],
             'lng': response.meta['item']['lng'],
             'lat': response.meta['item']['lat'],
             'offset': 0
         }
         if 'cate2_id' in cate:
             request_context['cate2'] = cate['cate2_name']
         yield scrapy.Request(url,
                              dont_filter=True,
                              callback=self.parse_shop,
                              meta={'item': request_context})
Example #20
def addUsers(response):
    for i in response["hits"]["hits"]:
        text = ""
        lat = round(i["_source"]["location"]["latitude"], 3)
        lon = round(i["_source"]["location"]["longitude"], 3)
        ghash = Geohash.encode(lat, lon)
        link = i["_source"]["link"]
        id = i["_id"]
        print id
        datetime = i["_source"]["created_time"]
        words = []
        if i["_source"]["caption"]:
            text = i["_source"]["caption"]["text"].encode("ascii", "ignore").replace("\n", " ").replace("\t", " ")
            # words = i['_source']['caption']['text'].encode('ascii','ignore').replace('\n',' ').replace('\t',' ').lower().split()
        username = i["_source"]["user"]["username"]
        output.write(
            "\t".join(
                (
                    id,
                    str(lat),
                    str(lon),
                    ghash,
                    link,
                    str(datetime),
                    username,
                    text,
                    i["_source"]["images"]["standard_resolution"]["url"],
                )
            )
            + "\n"
        )
Example #21
    def start_requests(self):
        conn = pymysql.connect(host='10.15.1.24', user='******', passwd='hh$writer', db='o2o', charset='utf8',
                               connect_timeout=5000, cursorclass=pymysql.cursors.DictCursor)
        cur = conn.cursor()
        sql = '''
                select city, spot_name, spot_id, lng, lat
                from t_hh_gaode_hotspots limit 10;
              '''
        cur.execute(sql)
        temps = cur.fetchall()
        for i, r in enumerate(temps):
            # Some rows store lng/lat reversed; swap them back into place
            if r['lng'] < r['lat']:
                a = r['lng']
                r['lng'] = r['lat']
                r['lat'] = a
            a = Geohash.encode(r['lat'], r['lng'], 12)
            url='http://waimai.meituan.com/home/' +a


            headers={
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
                'Accept-Encoding': 'gzip, deflate',
                'Accept-Language': 'zh-CN,zh;q=0.8',
                'Host': 'waimai.meituan.com',
                'Upgrade-Insecure-Requests': '1',
                "User-Agent": "Mozilla/5.0 (MSIE 9.0; Windows NT 6.1; Trident/5.0)",
            }
            yield scrapy.Request(url,headers=headers,meta={'item':{'lat':r['lat'],'lng':r['lng'],
                                                            'spot_name':r['spot_name'],'base_url':url},
                                                           },dont_filter=True)
Example #22
 def put(self):
     hour = int(request.form['hour'])
     date = request.form['date']
     prcp = float(request.form['prcp'])*100
     snow = float(request.form['snow']) * 10
     tmax = float(request.form['tmax']) * 10
     tmin = float(request.form['tmin']) * 10
     date = pd.to_datetime(date)
     with open(os.path.join(APP_STATIC, 'uniquegeohash.pkl'), 'rb') as f:
         uniquegeohash = dill.load(f)
     with open(os.path.join(APP_STATIC, 'predict_pickup_density.pkl'), 'rb') as f:
         model = dill.load(f)
     x_dict = [{"pickup_geohash": geostr, "hour": hour, "dayofweek": date.dayofweek, 'month': date.month,'PRCP':prcp,'SNOW':snow,'TMAX':tmax,'TMIN':tmin} for geostr in uniquegeohash]
     x_df = pd.DataFrame(x_dict)
     y = model.predict(x_df)
     geodecode = [Geohash.decode(geocode) for geocode in uniquegeohash]
     yzipgeo = zip(y, geodecode)
     sortedlist = sorted(yzipgeo, key=lambda x: -x[0])
     top10address = []
     top10dict = {}
     for y, geodecode in sortedlist[0:50]:
         key = ",".join(geodecode)
         top10dict[key] = top10dict.get(key,0) + y
     top10res = []
     for key in top10dict:
         temptuple = (float(key.split(",")[0]),float(key.split(",")[1]))
         top10res.append([top10dict[key],temptuple])
     top10res = sorted(top10res,key=lambda x:-x[0])
     top10res = top10res[0:10] if len(top10res) > 10 else top10res
     for u,geodecode in top10res:
         g = geocoder.google([geodecode[0], geodecode[1]], method='reverse').address
         top10address.append(g)
     return {"top10": top10res,"top10address":top10address}
Example #23
    def insert_into_all_tables(self, data):
        column_names = [
            "ciudad", "pais_destino", "divisa", "competidor", "comision",
            "tasa_cambio", "timestamp", "lat", "lon", "num_agente",
            "importe_nominal"
        ]
        column_values = []
        for i in column_names:
            column_values.append(data[i])

        column_names.append("importe_destino")
        importe_destino = 100 * float(data["tasa_cambio"]) - float(
            data["comision"])
        column_values.append(importe_destino)

        for table_name in self.tables.keys():
            if table_name.startswith("geo"):
                column_names.append("geohash")
                column_values.append(
                    Geohash.encode(float(data['lat']), float(data['lon'])))
                self._insert_data(table_name, column_names, column_values)
                # Remove last items.
                column_names.pop(-1)
                column_values.pop(-1)
            else:
                self._insert_data(table_name, column_names, column_values)
Example #24
    def construct_geo_location_query(lon=None,
                                     lat=None,
                                     distance=None,
                                     dist_unit=None):
        """
        Returns a geo location query for Couchbase and Elastic search
        """
        from lib.couchbase_helper.data import LON_LAT
        if not lon:
            lon_lat = random.choice(LON_LAT)
            lon = lon_lat[0]
            lat = lon_lat[1]
            distance = random.choice([10, 100, 500, 1000, 10000])
            dist_unit = random.choice(["km", "mi"])

        fts_query = {
            "location": {
                "lon": lon,
                "lat": lat
            },
            "distance": str(distance) + dist_unit,
            "field": "geo"
        }

        es_query = {
            "query": {
                "match_all": {}
            },
            "filter": {
                "geo_distance": {
                    "distance": str(distance) + dist_unit,
                    "geo": {
                        "lat": lat,
                        "lon": lon
                    }
                }
            }
        }

        case = random.randint(0, 3)

        # Geo Location as array
        if case == 1:
            fts_query['location'] = [lon, lat]
            es_query['filter']['geo_distance']['geo'] = [lon, lat]

        # Geo Location as string
        if case == 2:
            fts_query['location'] = "{0},{1}".format(lat, lon)
            es_query['filter']['geo_distance']['geo'] = "{0},{1}".format(
                lat, lon)

        # Geo Location as Geohash
        if case == 3:
            geohash = Geohash.encode(lat, lon, precision=random.randint(3, 8))
            fts_query['location'] = geohash
            es_query['filter']['geo_distance']['geo'] = geohash

        # Geo Location as an object of lat and lon if case == 0
        return fts_query, es_query
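A usage sketch, assuming the method is reachable as a plain function or static method. Passing explicit arguments skips the random location pick, though the representation of 'location' (object, array, string, or geohash) is still chosen at random:

fts_query, es_query = construct_geo_location_query(lon=-2.2426, lat=53.4808,
                                                   distance=100, dist_unit="km")
# Both queries carry the same distance string, e.g. "100km"
print(fts_query["distance"], es_query["filter"]["geo_distance"]["distance"])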
Example #25
def createLocationObservation(iesGraph,mmsi,timestamp,lat,lon,obs=None):
    print(mmsi,timestamp)
    #add the location transponder - We don't know this is necessarily a vessel. All we know is that we have a LocationTransponder. 
    lt = createLocationTransponder(iesGraph=iesGraph,mmsi=mmsi)
    #Now create the observation event
    lo = instantiate(iesGraph=iesGraph,_class=locationObservation)
    #If track emulation is not required, obs will be None. If it's not None, make the LocationObservation (lo) part of the overall track observation
    if obs:
        addToGraph(iesGraph=iesGraph,subject=lo,predicate=ipao,obj=obs)
    #...and the ParticularPeriod in which the observation occurred
    putInPeriod(iesGraph=iesGraph,item=lo,iso8601TimeString=timestamp)
    #And involve the transponder in that location observation
    ltPart = instantiate(iesGraph=iesGraph,_class=observedTarget)
    addToGraph(iesGraph=iesGraph,subject=ltPart,predicate=ipo,obj=lt) #participation of the transponder
    addToGraph(iesGraph=iesGraph,subject=ltPart,predicate=ipi,obj=lo) #participation in the LocationObservation
    #Now the observed location, a geopoint with a lat and long - using a geohash to give each point a unique uri
    gp = URIRef(dataUri+"geohash_"+Geohash.encode(lat,lon))
    instantiate(iesGraph=iesGraph,_class=geoPoint,instance=gp)
    #Add the lat and long values as identifiers of the geopoint...firstly creating repeatable URIs for them so they don't overwrite
    latObj = URIRef(gp.toPython()+"_lat")
    lonObj = URIRef(gp.toPython()+"_lon")
    instantiate(iesGraph=iesGraph, _class=latitude,instance=latObj)
    instantiate(iesGraph=iesGraph, _class=longitude,instance=lonObj)
    addToGraph(iesGraph=iesGraph,subject=gp,predicate=iib,obj=latObj)
    addToGraph(iesGraph=iesGraph,subject=gp,predicate=iib,obj=lonObj)
    #Add the representation values to the lat and lon objects
    addToGraph(iesGraph=iesGraph,subject=latObj,predicate=rv,obj=Literal(lat, datatype=XSD.string))
    addToGraph(iesGraph=iesGraph,subject=lonObj,predicate=rv,obj=Literal(lon, datatype=XSD.string))
    #Now the participation of the GeoPoint in the Observation
    gpPart = instantiate(iesGraph=iesGraph,_class=observedLocation)
    addToGraph(iesGraph=iesGraph,subject=gpPart,predicate=ipo,obj=gp) #participation of the GeoPoint
    addToGraph(iesGraph=iesGraph,subject=gpPart,predicate=ipi,obj=lo) #participation in the LocationObservation
Example #26
def get_distance(result):
    locs = list(
        set(result['geohashed_start_loc']) | set(result['geohashed_end_loc']))
    if np.nan in locs:
        locs.remove(np.nan)
    deloc = []
    for loc in locs:
        deloc.append(geohash.decode_exactly(loc))
    loc_dict = dict(zip(locs, deloc))
    geohashed_loc = result[['geohashed_start_loc', 'geohashed_end_loc']].values
    distance = []
    manhattan_distance = []
    for i in geohashed_loc:
        if i[0] is not np.nan and i[1] is not np.nan:
            lat1, lon1, _, _ = loc_dict[i[0]]
            lat2, lon2, _, _ = loc_dict[i[1]]
            distance.append(
                cal_distance(float(lat1), float(lon1), float(lat2),
                             float(lon2)))
            manhattan_distance.append(
                manhattan(float(lat1), float(lon1), float(lat2), float(lon2)))
        else:
            distance.append(np.nan)
            manhattan_distance.append(np.nan)
    result.loc[:, 'distance'] = distance
    result.loc[:, 'manhattan'] = manhattan_distance
    return result
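The cal_distance and manhattan helpers these get_distance snippets call are not included in the examples. A haversine-based pair consistent with how they are invoked might look like this (a sketch, not the original projects' code):

import math

def cal_distance(lat1, lon1, lat2, lon2):
    # Great-circle (haversine) distance in meters
    r = 6371000.0
    dlat = math.radians(lat2 - lat1)
    dlon = math.radians(lon2 - lon1)
    a = (math.sin(dlat / 2) ** 2 +
         math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) *
         math.sin(dlon / 2) ** 2)
    return 2 * r * math.asin(math.sqrt(a))

def manhattan(lat1, lon1, lat2, lon2):
    # L1 approximation: a north-south leg plus an east-west leg
    return (cal_distance(lat1, lon1, lat2, lon1) +
            cal_distance(lat2, lon1, lat2, lon2))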
Example #27
def get_shop_detail_info1(url):
    try:
        headers = {}
        headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"
        headers["Referer"] = "http://www.dianping.com/search/keyword/1/0_%E8%8E%98%E5%BA%84/p1?aid=20245884%2C6212826%2C15998012"
        headers['Cookie'] = '_lxsdk_cuid=160c6a7a179c8-02393e4445bc62-b7a103e-100200-160c6a7a179c8; _lxsdk=160c6a7a179c8-02393e4445bc62-b7a103e-100200-160c6a7a179c8; cy=1; cye=shanghai; _hc.v=0f32a3b6-7385-0888-333e-5c3045e7cad3.1515161390; s_ViewType=10; aburl=1; wed_user_path=27760|0; __mta=251920054.1515203025830.1515203025830.1515203025830.1; _lxsdk_s=160c901323c-8b8-3a9-dad%7C%7C218'

        content = HttpUtils.get(url, headers, 'utf-8')
        print content

        soup = BeautifulSoup(content, "lxml")
#        shop_expand_addr = soup.find(id="basic-info").find_all("div", "address")[0].find_all("span", "item")[0].get_text().strip()

        # tel
        shop_expand_tel = soup.find(id="basic-info").find_all("p", "tel")[0].find_all("span", "item")[0].get_text().strip()

        # open_time
        shop_expand_open = ''
        infos = soup.find(id="basic-info").find_all("div", "other")[0].find_all("p", "info")
        if infos is not None and len(infos) > 0:
            for info in infos:
                text = info.find_all("span", "info-name")[0].get_text().encode('utf-8')
                if text.find('时间') > 0:  # '时间' means 'time', i.e. the opening-hours row
                    shop_expand_open = info.find_all("span", "item")[0].get_text().strip()
                    break

        # photo
        photo_url = 'http://www.dianping.com/ajax/json/shopDynamic/shopTabs?shopId=90951360&cityId=1&shopName=Regiustea%E5%A4%A9%E5%BE%A1%E7%9A%87%E8%8C%B6&power=5&mainCategoryId=244&shopType=10&shopCityId=1&_token=eJxVTt1ugjAYfZdeN9ACbYHEC3WOieKGMowzXiAwJEAtlAx12buvJu5iyZecn%2B%2Bc5HyDbp4BFyOELAzBV94BF2ANaRRA0Ev1IZgSzBgxqGNBkP73mG1CcOziJ%2BDuiYMgNenhbqyV3mPDRtBG6AAVNShkjB2gYam7Z%2BYqAk59L1xdH4ZBy8qEi5IXWnpudHk6C91BDsEmRWoJUI0mUg2F1QOTB%2FZ%2FOlDTVVaWBVcs9y%2FRRlqy%2FVwHMorfr8gMbi%2Br1%2BWsXt2u9nS6Lj%2BaRRETP%2BfPfZO0p5iTSWoNzmbyOpt6YufVbeJdmtDbMZLvGuN8rP0238YirURnZQFfCF1u34Qe663PPVxQsZwXODuG4bgaR3Ul8nA0Aj%2B%2FFDJm5g%3D%3D&uuid=40e49af0-9fce-a1c6-0e90-dbacb64cb3cb.1516376513&platform=1&partner=150&originUrl=http%3A%2F%2Fwww.dianping.com%2Fshop%2F90951360'
        shop_style_photos = []

        # map
        shop_extra = {}
        if content.find("window.shop_config") > -1:
            js_str_start = content.find("window.shop_config")
            js_str_suf = content[js_str_start:]
            js_str_start = js_str_suf.find("{")
            js_str_end = js_str_suf.find("</script>")
            js_str = js_str_suf[js_str_start : js_str_end]
            json_obj = jsonpickle.decode(js_str)
            shop_extra['shop_name'] = json_obj['shopName']
            shop_extra['shop_addr'] = json_obj['address']
            shop_extra['shop_id'] = json_obj['shopId']
            shop_extra['shop_full_name'] = json_obj['fullName']
            shop_extra['shop_lat'] = json_obj['shopGlat']
            shop_extra['shop_lng'] = json_obj['shopGlng']
            shop_extra['shop_type'] = json_obj['shopType']
            shop_extra['shop_first_category'] = json_obj['categoryName']
            shop_extra['shop_second_category'] = json_obj['mainCategoryName']
            shop_extra['shop_logo'] = json_obj['defaultPic'][:json_obj['defaultPic'].find('.jpg')+4]
            shop_extra['shop_geohash'] = Geohash.encode(float(json_obj['shopGlat']), float(json_obj['shopGlng']), 8)
        shop_extra['shop_tel'] = shop_expand_tel
        shop_extra['shop_open'] = shop_expand_open
        shop_extra['shop_photos'] = shop_style_photos

        return shop_extra

    except Exception, e:
        traceback.print_exc(e)
        print url
Example #28
def logparse(LOGPATH, INFLUXHOST, INFLUXPORT, INFLUXDBDB, INFLUXUSER,
             INFLUXUSERPASS, MEASUREMENT, GEOIPDB, INODE, HOSTNAME):  # NOQA
    # Preparing variables and params
    IPS = {}
    COUNT = {}
    GEOHASH = {}
    # HOSTNAME = os.uname()[1]
    CLIENT = InfluxDBClient(host=INFLUXHOST,
                            port=INFLUXPORT,
                            username=INFLUXUSER,
                            password=INFLUXUSERPASS,
                            database=INFLUXDBDB)  # NOQA

    re_IPV4 = re.compile('(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
    re_IPV6 = re.compile(
        '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'
    )  # NOQA

    GI = geoip2.database.Reader(GEOIPDB)

    # Main loop to parse access.log file in tailf style with sending metrcs
    with open(LOGPATH, "r") as FILE:
        STR_RESULTS = os.stat(LOGPATH)
        ST_SIZE = STR_RESULTS[6]
        FILE.seek(ST_SIZE)
        while True:
            METRICS = []
            WHERE = FILE.tell()
            LINE = FILE.readline()
            INODENEW = os.stat(LOGPATH).st_ino
            if INODE != INODENEW:
                break
            if not LINE:
                time.sleep(1)
                FILE.seek(WHERE)
            else:
                IP = None
                if re_IPV4.match(LINE):
                    m = re_IPV4.match(LINE)
                    IP = m.group(1)
                elif re_IPV6.match(LINE):
                    m = re_IPV6.match(LINE)
                    IP = m.group(1)

                # Skip lines where neither pattern matched an address
                if IP and ipadd(IP).iptype() == 'PUBLIC':
                    INFO = GI.city(IP)
                    if INFO is not None:
                        HASH = Geohash.encode(INFO.location.latitude,
                                              INFO.location.longitude)  # NOQA
                        COUNT['count'] = 1
                        GEOHASH['geohash'] = HASH
                        GEOHASH['host'] = HOSTNAME
                        GEOHASH['country_code'] = INFO.country.iso_code
                        IPS['tags'] = GEOHASH
                        IPS['fields'] = COUNT
                        IPS['measurement'] = MEASUREMENT
                        METRICS.append(IPS)

                        # Sending json data to InfluxDB
                        CLIENT.write_points(METRICS)
Example #29
    def update_geohash(self):
        """
        Sets the geohash to correspond to the coordinates stored in self.geo.
        """
        if self.geo:
            self.geohash = Geohash.encode(self.geo.lat,
                                          self.geo.lon,
                                          precision=20)
Example #30
 def hotspots(self, family, name):
     query = Search(using=self.client, index=family)
     if name:
         query = query.filter('term', name=name)
     query = query.filter('range', timestamp={'gte':self.lookback})
     query.aggs.bucket('hotspot', 'geohash_grid', field='location', precision=7)
     hashes = query[0].execute().aggregations['hotspot']['buckets'][:3]
     return [Geohash.decode_exactly(hash['key'])[:2] for hash in hashes]
Example #31
def indexEvents(x):
    
    geoh = x['geohash']
    lat = Geohash.decode_exactly(geoh)[0]
    lon = Geohash.decode_exactly(geoh)[1]
    for e in x['data']:
        rec = {}
        rec['geoloc'] = {'lat':lat, 'lon': lon}
        rec['geohash'] = geoh
        rec['tags'] = []
        rec['count'] = e['count']
        rec['datetime'] = e['event']
        rec['images'] = e['likes']
        print e
        for tag in e['tags'].keys():
            rec['tags'].append({"name":tag, "count":e['tags'][tag]})
        es.index(index='instagram_events_j_final', doc_type='dc', body=rec)
Example #32
def features():
    obj = request.get_json(force=True)
    lat = float(obj["lat"])
    lon = float(obj["lon"])
    amenity = obj["amenity"]
    geohash = Geohash.encode(lat, lon)
    obj["geohash"] = geohash
    print(json.dumps(obj))
    sql = """
  WITH q1 AS
  (
    SELECT
      name,
      ST_Distance(ST_MakePoint(%s, %s)::GEOGRAPHY, ref_point)::NUMERIC(9, 2) dist_m,
      ST_Y(ref_point::GEOMETRY) lat,
      ST_X(ref_point::GEOMETRY) lon,
      date_time,
      key_value
    FROM osm
    WHERE
  """
    if useGeohash:
        sql += "geohash4 = SUBSTRING(%s FOR 4) AND amenity = %s"
    else:
        sql += "ST_DWithin(ST_MakePoint(%s, %s)::GEOGRAPHY, ref_point, 5.0E+03, TRUE) AND key_value && ARRAY[%s]"
    sql += """
  )
  SELECT * FROM q1
  """
    if useGeohash:
        sql += "WHERE dist_m < 5.0E+03"
    sql += """
  ORDER BY dist_m ASC
  LIMIT 10;
  """
    rv = []
    conn = get_db()
    #print("SQL:\n" + sql + "\n")
    with conn.cursor() as cur:
        try:
            if useGeohash:
                cur.execute(sql, (lon, lat, geohash, amenity))
            else:
                cur.execute(sql, (lon, lat, lon, lat, "amenity=" + amenity))
            for row in cur:
                (name, dist_m, lat, lon, dt, kv) = row
                d = {}
                d["name"] = name
                d["amenity"] = amenity
                d["dist_m"] = str(dist_m)
                d["lat"] = lat
                d["lon"] = lon
                #print("Feature: " + json.dumps(d))
                rv.append(d)
        except:
            logging.debug("Search: status message: {}".format(
                cur.statusmessage))
    return Response(json.dumps(rv), status=200, mimetype="application/json")
Example #33
 def get_around_geohashs(self):
     around_geohashs = {}
     self._get_min_lat_unit()
     self._get_min_long_unit()
     # north
     around_geohashs['n'] = Geohash.encode(self.lat + self.min_lat_unit,
                                           self.long, self.precision)
     around_geohashs['c'] = Geohash.encode(self.lat, self.long,
                                           self.precision)
     around_geohashs['s'] = Geohash.encode(self.lat,
                                           self.long + self.min_long_unit,
                                           self.precision)
     # northwest
     around_geohashs['nw'] = Geohash.encode(self.lat - self.min_lat_unit,
                                            self.long + self.min_long_unit,
                                            self.precision)
     # southwest
     around_geohashs['sw'] = Geohash.encode(self.lat - self.min_lat_unit,
                                            self.long - self.min_long_unit,
                                            self.precision)
     # northeast
     around_geohashs['ne'] = Geohash.encode(self.lat + self.min_lat_unit,
                                            self.long + self.min_long_unit,
                                            self.precision)
     # southeast
     around_geohashs['se'] = Geohash.encode(self.lat + self.min_lat_unit,
                                            self.long - self.min_long_unit,
                                            self.precision)
     return around_geohashs
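For comparison, the python-geohash package already used in Example #6 computes the same ring directly, without deriving the cell sizes by hand:

import geohash

# Returns the 8 surrounding cells; the centre hash itself is excluded.
ring = geohash.neighbors('wx4g0e')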
Example #34
def get_geohash_index(points, precision=24):
    """
    get a list of geohashes
    :param points: a list of GPS points, as (lon, lat) pairs
    :param precision: the length of the geohash string; the longer the string, the more precise the geohash
    :return: a list of geohash strings
    """
    geo_index = [Geohash.encode(latitude=p[1], longitude=p[0], precision=precision) for p in points]
    return geo_index
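Note the (lon, lat) input order implied by p[1]/p[0]. A quick usage sketch with made-up points:

# points are (lon, lat) pairs
points = [(-122.4194, 37.7749), (-73.9857, 40.7484)]
print(get_geohash_index(points, precision=7))  # two 7-character geohashes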
Example #35
 def save(self, *args, **kwargs):
     # if not self.point and not self.area:
     #     raise ValidationError(_(u"You must at least set a point or choose "
     #                             u"an area."))
     if not self.label:
         self.label = self.adr1
     if self.point:
         self.geohash = Geohash.encode(self.point.y, self.point.x)
     return super(Location, self).save(*args, **kwargs)
Example #36
def vpnPlusOnlineSessions(token):
    url = baseurl + '/webapi/entry.cgi'
    headers = {
           'Content-Type': 'application/x-www-form-urlencoded'
        }
    body = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'api':'SYNO.VPNPlus.Connectivity',
            'version':'1',
            'method':'list',
            'status':'online',
            '_sid':token
            }   
    r = session.post(url, headers=headers, data=body)
    jsonDic = json.loads(r.text)
    result = jsonDic['data']['cnt_list']

    influxPayloadVpnPlusActiveSessions= []
    for item in result:
        if item['ip_from'] != "0.0.0.0":
            INFO = GI.city(item['ip_from'])           
            HASH = Geohash.encode(INFO.location.latitude, INFO.location.longitude)
            country_code = INFO.country.iso_code
            if INFO.city.name != "" and INFO.city.name is not None:
                location_name = INFO.city.name
            else:
                location_name = country_code   
        else:
            HASH = "Blocked"
            location_name = "Blocked"
            country_code = "Blocked"            
        influxPayloadVpnPlusActiveSessions.append(
            {
                "measurement": "OUTLAND.Remote.Network.VPNPlus",
                "tags": {
                    "username": item['username'],
                    "geohash": HASH,
                    "country_code": country_code,
                    "location_name": location_name,
                    "externalIP": item['ip_from'],
                    "internalIP": item['signature']
                },
                "time": now_iso(),
                "fields": {
                    "externalIP": item['ip_from'],
                    "internalIP": item['signature'],
                    "abnormal": item['abnormal'],
                    "download": item['download'],
                    "upload": item['upload'],
                    "count": 1,
                    "time_duration": item['time_duration'],
                    "time_start": datetime.fromtimestamp(float(item['time_start'])).strftime('%Y-%m-%d %H:%M:%S')
                }
            }
        )
    influx_sender(influxPayloadVpnPlusActiveSessions,'telegraf')
    return influxPayloadVpnPlusActiveSessions
Example #37
def geohash_encoding(gcjLat, gcjLng, precision=12):
    if not isinstance(gcjLat, float) or not isinstance(gcjLng, float):
        raise ValueError("input lat and lng are not float")
    if not isinstance(precision, int):
        raise ValueError("input precision is not int")
    if not 1 <= precision <= 12:
        raise ValueError("precision must be between 1 and 12")
    wgsLat, wgsLng = gcj2wgs(gcjLat, gcjLng)
    return Geohash.encode(wgsLat, wgsLng, precision)
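A usage sketch, assuming gcj2wgs is the usual GCJ-02 to WGS-84 conversion helper imported alongside this function (the sample point is hypothetical):

# Beijing city centre, expressed in GCJ-02
print(geohash_encoding(39.9042, 116.4074, precision=8))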
Example #39
 def get(self, point, buffer_size=0, multiple=False):
     """ lookup state and county based on geohash of coordinates from tweet """
     lon, lat = point
     geohash = Geohash.encode(lat, lon, precision=self.precision)
     key = (geohash, buffer_size, multiple)
     if key in self.geohash_cache:
         # cache hit on geohash
         self.hit += 1
         #print self.hit, self.miss
         return self.geohash_cache[key]
     self.miss += 1
     # cache miss on geohash
     # project point to ESRI:102005
     lat, lon = Geohash.decode(geohash)
     proj_point = project([float(lon), float(lat)])
     args = dict(buffer_size=buffer_size, multiple=multiple)
     payload = self.get_object(proj_point, **args)
     self.geohash_cache[key] = payload
     return payload
Example #40
 def show_station_coordinfo(self):
     for channel in self.station_coordinfo.keys():
         info = self.station_coordinfo[channel]
         logger.debug("%s (%f, %f, %f) %s | %s",
                      channel,
                      info['latitude'],
                      info['longitude'],
                      info['elevation'],
                      info['geohash'],
                      Geohash.decode(info['geohash']))
Example #41
	def geogrid(self, query='*', region='*', min_published_on=None, max_published_on=None, weight=False):
		payload = {
		  "size": 1000,
		  "query": {
		    "bool": {
		      "must": {
		        "query_string": {
		          "fields": ["title", "description"],
		          "query": "*",
		          "analyze_wildcard": True
		        }
		      },
		      "filter": {
		        "bool": {
		          "must": [
		            {
		              "range": {
		                "published_on": {  "format": "epoch_millis",
		                  "gte": min_published_on or self._default_min_published_on,
		                  "lte": max_published_on or self._default_max_published_on
		                }
		              }
		            }
		          ]
		        }
		      }
		    }
		  },
		  "aggs": {
		    "geogrid": {
		      "geohash_grid": {
		        "field": "geo",
		        "precision": 6
		      }
		    }
		  }
		}
		if weight:
			payload['aggs']['geogrid']['aggs'] = {
			  "weight": {
			    "sum": {
			      "field": "geoweight"
			    }
			  }
			}
		self._add_theme(payload, query)
		self._add_region(payload, region)
		geogrid = self.request(payload)
		for row in geogrid['aggregations']['geogrid']['buckets']:
			lat, lon, _, _ = Geohash.decode_exactly(row['key'])
			row['lat'] = lat
			row['lon'] = lon
		return geogrid
Example #42
def position_update():
        jsonRequest = request.json

        Position(
                id=ObjectId('556018800640fd52df330d31'),
                lat=jsonRequest['lat'],
                long=jsonRequest['long'],
                geohash=Geohash.encode(jsonRequest['lat'], jsonRequest['long'], precision=16),
                updated=datetime.datetime.utcnow()
        ).save()

        return ('', 204)
Example #43
 def setup_start_points_to_trip(self):
     """
     Input: Route_name eg '22'
     Output: Dictionary which maps all of the trips associated with that route 
         to a gps coordinate where the trips start  
     Discussion: 
     TODO:
         This should live in schedule puller class
 
     """
     for trip_id in self.trip_to_dep_arr_times.keys():
         self.start_point_to_trips[trip_id] = Geohash.encode(37.76048,
                                                             -122.38895)
Example #44
    def post(self):

        if cgi.escape(self.request.get('lat')) and cgi.escape(self.request.get('lon')):
            lat = cgi.escape(self.request.get('lat'))
            lon = cgi.escape(self.request.get('lon'))
        if self.request.get('address'):
            lat = 0
            lon = 0
            
        precision = cgi.escape(self.request.get('precision')) or 20

        geohash = Geohash.encode(float(lat), float(lon))
        if geohash.startswith('c2'):
            geohash = geohash.replace('c2', '')
            geohash = geohash[0:int(precision)]
            pdxhash = {'pdxhash': geohash}
        else:
            pdxhash = {'error': 'Not in Portland'}
        self.response.out.write(simplejson.dumps(pdxhash) + "\n")
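The 'c2' check works because geohashes across the Portland area share that prefix; a quick sanity check (coordinates for downtown Portland):

import Geohash

# Downtown Portland, OR: geohashes there begin with 'c2'
print(Geohash.encode(45.5152, -122.6784)[:2])  # 'c2'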
Example #45
def gettile():

    vehicle_map=init_vehicle_map

    tilesresult=getdata.tiles()

    if tilesresult[:8]!="<option>":
        app.logger.debug('Connection error : %s',tilesresult)
        return render_template('gettile.html',vehicle_map=vehicle_map,error=ErrorMessage)
    else:
        alltiles=Markup(tilesresult)

    if request.method == 'POST':

        if request.form['tile']:

            tile=request.form['tile']

            markers_map=getdata.getvehicles_fortile(tile)

            #app.logger.debug('Debugging KILLRTAXI : %s',markers_map)

            if not isinstance(markers_map,list):
                app.logger.debug('Connection error : %s',markers_map)
                return render_template('gettile.html',vehicle_map=vehicle_map,error=ErrorMessage)

            nbmvts=len(markers_map)

            mappos=Geohash.decode(tile)

            vehicle_map = Map(
                identifier="view-side",
                lat=str(float(mappos[0])-0.2),
                lng=str(float(mappos[1])-0.2),
                style="height:700px;width:700px;margin:10;",
                zoom=9,
                markers=markers_map
                #markers=[(54.96848201388808, 0.39963558097359564),(54.968382013888075, -0.39953558097359565)]
            )

            return render_template('gettile.html', alltiles=alltiles,nbmvts=nbmvts,tile=tile,vehicle_map=vehicle_map)

    return render_template('gettile.html',alltiles=alltiles,vehicle_map=vehicle_map)
Example #46
def get_distance(result):
    locs = list(set(result['geohashed_start_loc']) | set(result['geohashed_end_loc']))
    if np.nan in locs: 
        locs.remove(np.nan)
    deloc = []
    for loc in locs:
        deloc.append(geohash.decode_exactly(loc))
    loc_dict = dict(zip(locs, deloc))
    geohashed_loc = result[['geohashed_start_loc', 'geohashed_end_loc']].values
    distance = []
    manhattan_distance = []
    for i in geohashed_loc:
        if i[0] is not np.nan and i[1] is not np.nan:
            lat1, lon1, _, _ = loc_dict[i[0]]
            lat2, lon2, _, _ = loc_dict[i[1]]
            distance.append(cal_distance(float(lat1), float(lon1), float(lat2), float(lon2)))
            manhattan_distance.append(manhattan(float(lat1), float(lon1), float(lat2), float(lon2)))
        else:
            distance.append(np.nan)
            manhattan_distance.append(np.nan)
    result.loc[:, 'distance'] = distance
    result.loc[:, 'manhattan'] = manhattan_distance
    return result
Example #47
    def get_inventory(self):
        starttime = UTCDateTime()
        endtime = UTCDateTime()
        client = Client(self.fdsn_server)
        for s in self.streams:
            net, sta, chan, loc = s
            inv = client.get_stations(starttime=starttime,
                                      endtime=endtime,
                                      network=net,
                                      station=sta,
                                      location=loc,
                                      channel=chan,
                                      level="response")
            channels = set(inv.get_contents()['channels'])

            for c in channels:
                try:
                    coords = inv.get_coordinates(c, datetime=starttime)
                except:
                    try:
                        coords = inv.get_coordinates(c)
                    except:
                        print c, "No matching coordinates found"
                        continue

                latitude = coords['latitude']
                longitude = coords['longitude']
                elevation = coords['elevation']
                self.station_coordinfo[c] = \
                    {"latitude": latitude,
                     "longitude": longitude,
                     "elevation": elevation,
                     "geohash": Geohash.encode(latitude,
                                               longitude,
                                               precision=7)
                     }
Example #48
    def costAlgo(r):
        parsedJson = json.loads(r[1])
        # This needs to change depending on the stream information. Then we handle that and calculate cost.
        fromHash = parsedJson["from"]
        geohash = Geohash.encode(fromHash.get("lat"), fromHash.get("lon"), 9)
        dest = parsedJson["to"]
        toHash = Geohash.encode(dest.get("lat"), dest.get("lon"), 8)

        keyHash = str(parsedJson["key"])
        tStamp = str(parsedJson["timestamp"])
        # Keep the value numeric; it is used in the cost arithmetic below
        value = float(parsedJson["value"])

        # Try to connect and insert into our database
        try:
            conn = psycopg2.connect("dbname='DRP' user='******' password='******'")
        except:
            print "I am unable to connect to the database."

        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        try:
            result = None

            while result == None:
                cur.execute("""SELECT * FROM geohashed_ways WHERE geohash LIKE """ + "'" + geohash + "%';")
                result = cur.fetchone()
                # Drop one trailing character to widen the prefix on each retry
                geohash = geohash[:-1]

        except:
            print "I can't SELECT."

        # Compute the cost using the updated data that was streamed and data already existing in our database.
        tagDict = pghstore.loads(result['tags'])
        isAccident = False
        if keyHash == 'TRAFFIC_INCIDENT':
            cost = value*10
            isAccident = True
        else:
            cost = value

        for k, v in tagDict.items():
            if k == 'lanes':

                if tagDict[k] == 2 and result['oneway'] == 'yes':
                    if isAccident:
                        cost=cost+(value*10)
                    else:
                        cost=cost+20
                elif tagDict[k] == 2 and result['oneway'] == 'no':
                    if isAccident:
                        cost=cost+(value*5)
                    else:
                        cost=cost+15
                elif tagDict[k] > 2 and result('oneway') == 'yes':
                    if isAccident:
                        cost=cost+(value*5)
                    else:
                        cost=cost+10
                elif tagDict[k] > 2 and result('oneway') == 'no':
                    if isAccident:
                        cost=cost+(value*2)
                    else:
                        cost=cost+5

            elif k == 'highway':
                if tagDict[k] == 'primary':
                    cost=cost-20
                elif tagDict[k] == 'secondary':
                    cost=cost-15
                elif tagDict[k] == 'trunk':
                    cost=cost-10
                elif tagDict[k] == 'trunk_link':
                    cost=cost-5

            elif k == 'maxspeed':
                speed = int(v)  # hstore values come back as strings
                if speed >= 50:
                    cost = cost - speed
                else:
                    cost = cost + speed

        try:
            # Write the updated cost for the way back to our database.
            cur.execute("UPDATE ways SET cost = %s WHERE id = %s;", (cost, result['id']))
            conn.commit()
        except psycopg2.Error:
            print("I can't UPDATE")
        cur.close()
        conn.close()
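The shorten-and-retry loop in costAlgo leans on the defining property of geohashes: nearby points share a prefix, and each extra character splits a cell into 32 sub-cells, so dropping a character widens the match to the enclosing cell. A quick illustration, assuming the same Geohash package used throughout these examples (coordinates are illustrative):

import Geohash

# Two points a few hundred meters apart.
a = Geohash.encode(52.5200, 13.4050, 9)
b = Geohash.encode(52.5210, 13.4070, 9)

# Their shared prefix names the smallest common cell; a SQL
# "LIKE 'prefix%'" matches everything inside that cell.
prefix = ''
for ca, cb in zip(a, b):
    if ca != cb:
        break
    prefix += ca
print(a, b, '-> shared prefix:', prefix)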
Example #49
0
    else:
        raise ValueError('malformed coordinates: %r' % coords)
    return lat, long

export = {}
with zipfile.ZipFile(ARQ) as kmz:
    doc = kmz.open('doc.kml')
    raiz = None
    qt_lidos = 0
    capital = True
    for event, elem in ElementTree.iterparse(doc):
        ns = elem.tag[:elem.tag.find('}')+1]
        if elem.tag == ns+'Placemark':
            nome, uf = extrair_nome_uf(ns, elem)
            codmun = extrair_codmun(ns, elem)
            lat, long = extrair_coords(ns, elem)
            qt_lidos += 1
            g_hash = Geohash.encode(lat, long)
            export[codmun] = (uf, int(capital), g_hash, lat, long, nome)
            #break
        elif elem.tag == ns+'Folder':
            nome_folder = elem.findtext(ns+'name')
            assert capital == (nome_folder==u'Capitais'), 'folder and flag do not match'
            if capital and nome_folder==u'Capitais':
                capital = False

with open('coords.json', 'w') as saida:
    json.dump(export, saida, indent=2)

print(qt_lidos, 'tuples saved')
Example #50
0
# 1000 API calls per day are free -> every 86 seconds. So 100 seconds sleep
	if now > last + 100: 
		last = now
		lat = random.randint(35,60)
		lon = random.randint(-11,30)
		print('coords: %d,%d' % (lat, lon))
		try:
			forecast = forecastio.manual('https://api.forecast.io/forecast/%s/%d,%d?exclude=minutely,hourly,daily,flags' % (forecast_io_api_key, lat, lon))
		except Exception:
			forecast = None
		if forecast is not None:
			#print forecast.json

# geohash 3 has a lat/lon error of 0.7, so it should be ok for lat and lon as int
			geohash = Geohash.encode(lat, lon, precision=3)

			if 'alerts' in forecast.json:
				alerts = forecast.json['alerts']
				for alert in alerts:
					print(alert)
					ttl = alert['expires'] - now
					payloaddict = dict(_type='msg',tst=now,ttl=ttl,prio=2,icon='fa-exclamation-triangle',desc=alert['description'],title=alert['title'], url=alert['uri'])
					client.publish('msg/forecast.io/%s' % geohash, payload=json.dumps(payloaddict), qos=2, retain=True)
					print('pub: ' + 'msg/forecast.io/%s' % geohash)
			else:
				if 'currently' in forecast.json:
					currently = forecast.json['currently']
					weather = '%s' % (currently['summary'])
					payloaddict = dict(_type='msg',tst=currently['time'],ttl=24*3600,prio=1,icon='fa-exclamation-triangle',title='Local Weather',desc=weather, url='http://forecast.io')
				else:
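The precision remark in the comment above checks out: a 3-character geohash carries 15 bits, giving a half-cell error of 360/2**8/2 = 0.703 degrees in longitude and 180/2**7/2 = 0.703 in latitude. A minimal sketch to verify it, assuming the python-geohash module (whose decode_exactly, used in other examples here, reports the per-axis error):

import geohash  # python-geohash, as in other examples here

# decode_exactly returns (lat, lon, lat_err, lon_err); the errors are
# the half-widths of the cell along each axis, in degrees.
for precision in (3, 5, 7):
    lat, lon, lat_err, lon_err = geohash.decode_exactly(
        geohash.encode(48.0, 11.0, precision))
    print(precision, round(lat_err, 4), round(lon_err, 4))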
Example #51
0
def geohash(latitude, longitude):
    return Geohash.encode(float(latitude), float(longitude))
Example #52
0
 def generate_cache_key(self, lat, lon):
     return Geohash.encode(lat, lon)
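One design note on this cache key: at the Geohash package's default 12-character precision, two readings a few meters apart produce different keys and never share a cache entry. Passing an explicit precision trades positional accuracy for hit rate; a hedged variant (the name generate_coarse_cache_key and the choice of 6 characters, roughly a 1.2 km by 0.6 km cell, are illustrative):

 def generate_coarse_cache_key(self, lat, lon, precision=6):
     # Nearby lookups inside the same ~1.2 km x 0.6 km cell share a key.
     return Geohash.encode(lat, lon, precision=precision)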
Example #53
0
 def _get_geohash(self, town):
     if town is not None:
         return geohash.encode(town.latitude, town.longitude)
     else:
         return None
Example #54
0
def change_corr(cor, precision=5):
    x, y = cor
    geohash = Geohash.encode(x, y, precision=precision)
    x, y = Geohash.decode(geohash)
    return [float(x), float(y)]
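A quick usage sketch for change_corr (coordinates are illustrative): the encode/decode round trip snaps a point to the center of its geohash cell, about 5 km by 5 km at precision 5, which is a cheap way to bucket nearby points together:

print(change_corr((39.94758, 116.37439)))               # cell center at precision 5
print(change_corr((39.94758, 116.37439), precision=7))  # finer cell, closer to the input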
Example #55
0
    for values2 in cursor2:
        docStandort = dict(zip(cursor2.column_names, values2))
        breite = None
        laenge = None
        if docStandort["standort_laenge"] and docStandort["standort_breite"]:
            breite = docStandort["standort_breite"]
            laenge = docStandort["standort_laenge"]
            docStandort["koordinaten_institutionengenau"] = True
        elif docStandort["ort_laenge"] and docStandort["ort_breite"]:
            breite = docStandort["ort_breite"]
            laenge = docStandort["ort_laenge"]
            docStandort["koordinaten_institutionengenau"] = False
        if breite and laenge:
            docStandort["koordinaten"] = str(breite) + "," + str(laenge)
            docStandort["geohash"] = []
            geohash = Geohash.encode(breite, laenge)
            # Store every prefix of the geohash, tagged with its zero-padded
            # length ("01-u", "02-u2", ...), so queries can match at any precision.
            i = 1
            while (i <= len(geohash)):
                docStandort["geohash"] += [('%02d' % i) + "-" + geohash[0:i]]
                i += 1

        del docStandort["standort_laenge"]
        del docStandort["standort_breite"]
        del docStandort["ort_laenge"]
        del docStandort["ort_breite"]

        if docStandort["bistum_uid"]:
            queryBistumURL = """
			SELECT
				url.url, url.bemerkung,
				url_typ.name AS url_typ
Example #56
0
def get_eloc_latlon(result):
    eloc_latlon = result['geohashed_end_loc'].apply(lambda x: geohash.decode_exactly(x)[:2])
    result['eloc_lat'] = eloc_latlon.apply(lambda x: float(x[0]))
    result['eloc_lon'] = eloc_latlon.apply(lambda x: float(x[1]))
    return result
Example #57
0
def build_geohash_id(lat, lon):
    """Build a GR id with an embedded geohash"""
    gh = Geohash.encode(lat, lon)
    return "GR_%s" % gh
Example #58
0
# coding: utf-8
import Geohash
# lng: longitude
# lat: latitude

lng = 116.37439
lat = 39.94758

h = Geohash.encode(lat,lng)
print(h)
print(Geohash.decode(h))
Example #59
0
        simplergeo.__init__(self, **kwargs)
        self.cfg = cfg

if __name__ == '__main__':

    import Geohash
    import time
    import json

    token='YER_SIMPLEGEO_TOKEN'
    secret='YER_SIMPLEGEO_SECRET'
    layer_name='YER_SIMPLEGEO_LAYER'

    lat = 37.764845
    lon = -122.419857
    uid = Geohash.encode(lat, lon)

    args = {
        'geometry' : {
            'type' : 'Point',
            'coordinates' : [ lon, lat ],
            },
        'created' : int(time.time()),
        'properties' : {},
        'type' : 'Feature',
        }

    geo = simplegeo(token=token, secret=secret, debug=True)

    req = '/records/%s/%s.json' % (layer_name, uid)
    rsp = geo.execute_request_simple(req, method='PUT', args=json.dumps(args))
Example #60
0
def get_sloc_latlon(result):
    sloc_latlon = result['geohashed_start_loc'].apply(lambda x: geohash.decode_exactly(x)[:2])
    result['sloc_lat'] = sloc_latlon.apply(lambda x: float(x[0]))
    result['sloc_lon'] = sloc_latlon.apply(lambda x: float(x[1]))
    return result
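A minimal usage sketch for the two helpers above (this one and get_eloc_latlon from Example #56), assuming a pandas DataFrame with the geohashed columns used throughout these examples; the sample hashes are illustrative:

import geohash  # python-geohash
import pandas as pd

df = pd.DataFrame({
    'geohashed_start_loc': ['wx4g0ec', 'wx4g2f3'],
    'geohashed_end_loc':   ['wx4g2f3', 'wx4g0ec'],
})
df = get_sloc_latlon(df)
df = get_eloc_latlon(df)
print(df[['sloc_lat', 'sloc_lon', 'eloc_lat', 'eloc_lon']])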