def purchases():
    idToken = request.args['idToken']
    accountType = request.args['accountType']

    # Check that the token is valid
    try:
        decodedToken = auth.verify_id_token(idToken)
    except Exception:
        return "INVALID USER TOKEN"
    customerId = decodedToken['uid']

    # Determine the accountId of the correct account
    accountId = getAccountId(accountType, customerId)
    if accountId == "":
        return "ACCOUNT TYPE DOES NOT EXIST"

    # Retrieve all purchases and extract the data in JSON format
    url = 'http://api.reimaginebanking.com/accounts/{}/purchases?key={}'.format(
        accountId, apiKey)
    r = requests.get(url=url)
    purchaseData = r.json()

    locationFrequency = {}
    decodedLatLng = {}
    allPurchases = []
    for purchase in purchaseData:
        purchaseObject = {}
        purchaseInfo = parsePurchase(purchase)
        merchantId = purchaseInfo["merchantId"]
        merchantInfo = parseMerchant(merchantId)
        lat = merchantInfo["lat"]
        lng = merchantInfo["lng"]
        purchaseObject["name"] = merchantInfo["name"]
        purchaseObject["amountSpent"] = purchaseInfo["amountSpent"]
        purchaseObject["date"] = purchaseInfo["date"]
        purchaseObject["category"] = merchantInfo["category"]
        allPurchases.append(purchaseObject)

        # Truncate the geohash so nearby purchases share a coarse cell,
        # then count how often each cell occurs
        code = geohash2.encode(lat, lng)
        code = code[:-8]
        if code in locationFrequency:
            locationFrequency[code] += 1
            decodedLatLng[geohash2.decode(code)] += 1
        else:
            locationFrequency[code] = 1
            decodedLatLng[geohash2.decode(code)] = 1

    # Keep the five most frequent locations and look up deals for them
    out = sorted(decodedLatLng, reverse=True, key=decodedLatLng.get)
    locations = out[0:5]
    outputData = parseGroupon(locations)
    return json.dumps(outputData)
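# A minimal, standalone sketch (not part of the endpoint above) of the
# bucketing trick purchases() relies on: truncating a geohash groups nearby
# points into the same coarse cell, so a dict keyed by the truncated hash
# counts visits per area. The sample coordinates are illustrative.
import geohash2

points = [(37.7749, -122.4194), (37.7750, -122.4190), (40.7128, -74.0060)]
frequency = {}
for lat, lng in points:
    cell = geohash2.encode(lat, lng)[:4]  # precision-4 cell, tens of km wide
    frequency[cell] = frequency.get(cell, 0) + 1

# The two San Francisco points share a cell; the New York point does not,
# so sorting by count surfaces the most-visited area first.
print(sorted(frequency, reverse=True, key=frequency.get))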
def geohash_decode(geohash):
    """Compute coordinates from a Geohash.

    -- https://en.wikipedia.org/wiki/Geohash
    """
    return geohash2.decode(geohash)
def _decode(geohash: str) -> tuple[str, str]:
    """
    Decode geohash to latitude/longitude.

    The location is the approximate centre of the geohash cell, to
    reasonable precision. Note that the coordinates come back as
    strings, as the examples below show.

    Parameters
    ----------
    geohash : str
        Geohash str to be converted to latitude/longitude.

    Returns
    -------
    (lat : str, lon : str)
        Geohashed location.

    Example
    -------
    >>> from pymove.utils.geoutils import _decode
    >>> geoHash_1 = '7pkddb6356fyzxq'
    >>> geoHash_2 = '7pkd7t2mbj0z1v7'
    >>> print(_decode(geoHash_1), type(_decode(geoHash_1)))
    ('-3.777736', '-38.547792') <class 'tuple'>
    >>> print(_decode(geoHash_2), type(_decode(geoHash_2)))
    ('-3.793388', '-38.517722') <class 'tuple'>
    """
    return gh.decode(geohash)
def geohash_to_dd(geohash):
    # geohash2.decode returns a (lat, lon) tuple of strings
    coordinates = geohash2.decode(geohash)
    return " ".join(coordinates)
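# Hedged usage example for geohash_to_dd (the geohash value is illustrative):
# the decoded latitude and longitude come back as strings and are joined
# into one space-separated string.
print(geohash_to_dd('9q8yy'))  # prints something like "37.8 -122.4"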
def get_quantize_coords_from_geohash(precision, geohash_map):
    """Return the quantized coordinates of an image's location
    at the requested precision."""
    precision_string = 'hash{precision}'.format(precision=precision)
    lat, lng = geohash2.decode(geohash_map[precision_string])
    return (float(lat), float(lng))
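# Hypothetical usage of get_quantize_coords_from_geohash: geohash_map is
# assumed to map 'hash<precision>' keys to precomputed geohash strings for
# one image, so a coarser precision yields a coarser cell centre.
geohash_map = {'hash4': '9q8y', 'hash6': '9q8yyk'}
print(get_quantize_coords_from_geohash(4, geohash_map))  # coarse cell centre
print(get_quantize_coords_from_geohash(6, geohash_map))  # finer cell centre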
def _decode(geohash):
    """
    Decode geohash to latitude/longitude (the location is the approximate
    centre of the geohash cell, to reasonable precision).

    Parameters
    ----------
    geohash : string
        Geohash string to be converted to latitude/longitude.

    Returns
    -------
    (lat : str, lon : str)
        Geohashed location, returned as strings.
    """
    return gh.decode(geohash)
def _decode(geohash: Text) -> Tuple[str, str]:
    """
    Decode geohash to latitude/longitude (the location is the approximate
    centre of the geohash cell, to reasonable precision).

    Parameters
    ----------
    geohash : str
        Geohash str to be converted to latitude/longitude.

    Returns
    -------
    (lat : str, lon : str)
        Geohashed location, returned as strings.
    """
    return gh.decode(geohash)
def executeMapQuery(self, session, timestamp):
    """Select the data for all 10,000 households at a given timestamp
    and decode each geohash to a (lat, lon) GPS tuple.

    Example timestamp in ISO 8601 format: '2020-02-01T16:51:03'
    Example timestamp: '2020-02-07 15:04:34'
    """
    queryStr = "SELECT geohash, energy from simpletimeseries where timestampcol=?"
    map_lookup_stmt = session.prepare(queryStr)
    rows = session.execute(map_lookup_stmt, [timestamp])
    df = pd.DataFrame(rows)  # columns: 'geohash', 'energy'
    df['GPS'] = df['geohash'].apply(lambda x: geohash2.decode(x))
    df['lat'] = df['GPS'].apply(lambda x: x[0])
    df['lon'] = df['GPS'].apply(lambda x: x[1])
    return df
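# A self-contained sketch of the decode step used in executeMapQuery above,
# with a small in-memory frame standing in for the Cassandra result set
# (the geohash and energy values are made up).
import pandas as pd
import geohash2

df = pd.DataFrame({'geohash': ['9q8yyk', 'dr5ru7'], 'energy': [1.2, 3.4]})
df['GPS'] = df['geohash'].apply(geohash2.decode)
df['lat'] = df['GPS'].apply(lambda t: t[0])
df['lon'] = df['GPS'].apply(lambda t: t[1])
print(df[['lat', 'lon', 'energy']])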
def get_signal_info(signal_id):
    """Call FOAM signal functions on the Ethereum blockchain and the FOAM API
    and return their results.

    Args:
        signal_id (int): Unique identifier of a signal.

    Returns:
        dict: Information about a signal, if it exists: its radius, geohash,
        coordinates, mint_time, burn_time, CST, and how much was staked for it.
    """
    w3 = Web3(Web3.HTTPProvider(INFURA_URL))
    contract_abi = json.dumps(json.load(open(CONTRACT_ABI_PATH)))
    signals = w3.eth.contract(address=CONTRACT_ADDRESS, abi=contract_abi)
    signal_info = {
        # Does the signal exist [bool]
        "exists": signals.functions.exists(signal_id).call(),
        # Radius of the signal [int/float]
        "radius": signals.functions.tokenRadius(signal_id).call(),
        # Signal's geohash [hex]
        "geohash": signals.functions.tokenGeohash(signal_id).call(),
        # Time of creation - epoch [int]
        "mint_time": signals.functions.tokenMintedOn(signal_id).call(),
        # Time of deletion - epoch [int]
        "burn_time": signals.functions.tokenBurntOn(signal_id).call(),
        # CST [hex]
        "cst": signals.functions.computeCST(CONTRACT_ADDRESS, signal_id).call().hex(),
        # How much FOAM was staked - in WEI [int]
        "staked": signals.functions.tokenStake(signal_id).call(),
    }

    if not signal_info['exists']:
        raise Exception('Invalid signal')

    # tokenGeohash from the blockchain is not an actual geohash but
    # Crypto-Spatial Coordinates; the geohash is obtained from the FOAM API.
    signal_info_api = requests.get(
        f"https://map-api-direct.foam.space/signal/details/{signal_info['cst']}"
    ).json()
    signal_info["geohash"] = signal_info_api["geohash"]
    signal_info["coordinates"] = geohash2.decode(signal_info_api["geohash"])
    return signal_info
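# Hypothetical call to get_signal_info: it needs working INFURA_URL,
# CONTRACT_ABI_PATH, and CONTRACT_ADDRESS values plus the id of a signal
# that actually exists, so this is illustrative only. geohash2.decode
# returns the coordinates as strings, hence the float casts.
info = get_signal_info(12345)  # 12345 is a made-up signal id
lat, lon = info['coordinates']
print(float(lat), float(lon))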
    accuracy_arr.append(accuracy_model)
    start = end
    end = end + int(x.values[j])

print("---------------------------------------------------------------")
print("These cities have fewer than 1500 data points; please provide enough",
      "data points to get a correct prediction:\n!!", place_insufficient_data,
      "\n\nThank you 🙂 !!")
print("\nTotal number of cities with insufficient data:", count, "\n")
print("---------------------------------------------------------------")

# Lists to store the latitude and longitude of each geohash value
geohash_lat = []
geohash_lon = []

# Decode each value of the geohash6 column and store the returned
# latitude and longitude in the lists
for j in range(len(place_arr)):
    geohash_lat.append(float(gh.decode(place_arr[j])[0]))
    geohash_lon.append(float(gh.decode(place_arr[j])[1]))

# Build a dataframe containing each place's name, latitude, longitude,
# and the model's prediction accuracy for that place
list_of_tuples2 = list(zip(place_arr, geohash_lat, geohash_lon, accuracy_arr))
df = pd.DataFrame(list_of_tuples2,
                  columns=['Place', 'Place_latitude', 'Place_longitude', 'Accuracy'])
df.to_csv("Neural_Network_model.csv")

# End time, to measure how long the program took to run
end_time = time.time()
print("The places in the data, with their respective latitude and longitude,",
      "can be predicted with the accuracy given below:\n\n\n", df)
print("\nTotal time taken to execute the program:", end_time - start_time, "seconds")
import pandas as pd
import numpy as np
import geohash2

df = pd.read_csv("D:\\Program_file\\python_file\\contest\\test2.csv")
print(df)

# Add empty columns for the decoded start/end latitude and longitude
df["J1"] = np.nan
df["W1"] = np.nan
df["J2"] = np.nan
df["W2"] = np.nan
for i in range(df.shape[0]):
    df.iloc[i, 7], df.iloc[i, 8] = geohash2.decode(df.iloc[i][5])
    df.iloc[i, 9], df.iloc[i, 10] = geohash2.decode(df.iloc[i][6])
print(df)

dx = df.groupby(by=[df["J1"], df["W1"], df["J2"], df["W2"]])
di = dx.count()["userid"]
print(di)

# Commented-out exploratory code:
# print("----------------------------")
# print(df.shape, " ", df.shape[0])
# print("----------------------------")
# c1 = df["geohashed_start_loc"]
# c2 = df["geohashed_end_loc"]
# print("----------------------------")
# d1 = np.array(c1)
# d2 = np.array(c2)
# print(d1)
# print("----------------------------")
# e1 = list(map(geohash2.decode, d1))
# Imports needed by this snippet
from math import radians, sin, cos, asin, sqrt

import geohash2
import pandas as pd

df = pd.read_csv('D:/1.csv')
# df.userid.unique().size
# df.biketype.unique().size
df = df.sample(frac=0.001)

start = pd.DataFrame(columns=['j1', 'w1'])
end = pd.DataFrame(columns=['j2', 'w2'])
# distance = []
for i in range(df.shape[0]):
    start = start.append(
        [{'j1': geohash2.decode(df.iloc[i][5])[0],
          'w1': geohash2.decode(df.iloc[i][5])[1]}],
        ignore_index=True)
    end = end.append(
        [{'j2': geohash2.decode(df.iloc[i][6])[0],
          'w2': geohash2.decode(df.iloc[i][6])[1]}],
        ignore_index=True)


def get_distance_hav(lon1, lat1, lon2, lat2):
    """Haversine distance between two points, in km rounded to 3 places.
    Note the longitude-first argument order."""
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a)) * 6371 * 1000  # metres
    c = round(c / 1000, 3)  # convert to km
    return c

# Commented-out fragment:
# def hav(theta):
#     s = sin(theta / 2)
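# Hedged example combining geohash2.decode with get_distance_hav above.
# The geohashes are illustrative; decode returns strings (hence the float
# casts), and the function expects longitude before latitude.
lat1, lon1 = map(float, geohash2.decode('wx4g0e'))
lat2, lon2 = map(float, geohash2.decode('wx4g2f'))
print(get_distance_hav(lon1, lat1, lon2, lat2), 'km')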
def _decode(geohash):
    return gh.decode(geohash)
import re

import folium
import geohash2
import pandas as pd
from folium.plugins import HeatMap

df = pd.read_csv(
    'd:/1.csv',
    usecols=['starttime', 'geohashed_start_loc', 'geohashed_end_loc'])
df_start = pd.DataFrame()
df_end = pd.DataFrame()
df = df[180000:230000]

for i in range(df.shape[0]):
    # Full pattern: 2017-05-(10|11|12|15|16|17|18|19|22|23)\s(06|07|08):\d\d:\d\d
    an = re.search(r'2017-05-10\s07:\d\d:\d\d', df.iloc[i, 0])
    if an:
        df_start = df_start.append([geohash2.decode(df.iloc[i][1])],
                                   ignore_index=True)
        df_end = df_end.append([geohash2.decode(df.iloc[i][2])],
                               ignore_index=True)

lis_start = []
lis_end = []
for j in range(df_start.shape[0]):
    lis_start.append([df_start.iloc[j][0], df_start.iloc[j][1], 1])
for k in range(df_end.shape[0]):
    lis_end.append([df_end.iloc[k][0], df_end.iloc[k][1], 1])

city_map_start = folium.Map(
    location=[39.93, 116.38],
    zoom_start=15,
data_test_bikeid_counter = Counter(data_test['bikeid']).most_common(100)
print(data_test_bikeid_counter)

# In[17]:

# Find the busiest points
data_test_start_loc_counter_all = Counter(data_test['geohashed_start_loc'])
data_test_start_loc_counter = Counter(
    data_test['geohashed_start_loc']).most_common(100)
print(data_test_start_loc_counter)

# In[19]:

# print(list(data_test_start_loc_counter))
# print(data_test_start_loc_counter_all.items())
data_test_start_loc_counter_all_dit = dict(data_test_start_loc_counter_all)
print(data_test_start_loc_counter_all_dit)

# In[21]:

# data_test['ll_start_loc'] = pd.loc(data_test['geohashed_start_loc'])
data_test_hashed_start_loc = data_test['geohashed_start_loc']
print(data_test_hashed_start_loc)
# gh.decode takes a single geohash string, so decode the column element-wise
data_test_ll_start_loc = data_test_hashed_start_loc.apply(gh.decode)
print(data_test_ll_start_loc)

# In[18]:

print(list(data_test_start_loc_counter_all.elements()))
        mintemp = str(res['temperatureMin'][index]) if res['temperatureMin'][index] else 'n/a'
        print('[forecast] %s - High of %s, Low of %s' % (day, maxtemp, mintemp))
        print('[forecast] %s - %s' % (day, res['narrative'][index]))
        # additional entries include (but are not limited to): moonPhase,
        # sunriseTimeLocal, daypart['precipChance'], daypart['windDirection'], etc.
else:
    print('[forecast] daily forecast returned')

for item in results:
    count = item['value']
    geo_area = item['key']
    [lat, lon] = geohash2.decode(geo_area)
    lat = float(lat)
    lon = float(lon)
    name_results = reverseGeocode(lat, lon)
    if name_results['count'] > 0:
        place_name = name_results['results'][0]['county']
        if not place_name:
            place_name = name_results['results'][0]['name']
        country = name_results['results'][0]['country']
        print("\n\n======= {:s} ({:s}) =======".format(place_name, country))
# In[19]:

# Read the input file
traf_mgmt_file = your_local_path + 'training.csv'
print(traf_mgmt_file)
traf_mgmt_data = pd.read_csv(traf_mgmt_file)

# Head of the input
traf_mgmt_data.head()

# In[20]:

# Convert each geohash to its latitude and longitude
import geohash2 as pgh

traf_mgmt_data['latlong'] = traf_mgmt_data['geohash6'].apply(
    lambda x: pgh.decode(x))
traf_mgmt_data.tail()

# In[21]:

# Split the latitude and longitude into separate columns
traf_mgmt_data[['lat', 'long']] = pd.DataFrame(
    traf_mgmt_data['latlong'].tolist(), index=traf_mgmt_data.index)
traf_mgmt_data.tail()

# In[24]:

# Identify the unique latlong and geohash6 points
unique_geohash6 = traf_mgmt_data['geohash6'].unique().tolist()
unique_latlong = traf_mgmt_data['latlong'].unique().tolist()
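# A hedged alternative for the split in the cell above: unpack the decoded
# (lat, long) tuples with zip(*...) instead of building an intermediate
# DataFrame; the result is the same pair of string-valued columns.
traf_mgmt_data['lat'], traf_mgmt_data['long'] = zip(*traf_mgmt_data['latlong'])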
# Imports needed by this snippet
import re

import geohash2
import pandas as pd

df = pd.read_csv(
    'D:\\Program_file\\python_file\\python.con\\mobike_train_data.csv',
    usecols=['starttime', 'geohashed_start_loc', 'geohashed_end_loc'])
df_start = pd.DataFrame(columns=['j1', 'w1', 'xx', 'xa'])
df_end = pd.DataFrame(columns=['j2', 'w2'])
df = df[197000:200000]

for i in range(df.shape[0]):
    an = re.search(
        r'2017-05-(10|11|12|15|16|17|18|19|22|23)\s(06|07|08):\d\d:\d\d',
        df.iloc[i, 0])
    if an:
        df_start = df_start.append([{
            'j1': geohash2.decode(df.iloc[i][1])[0],
            'w1': geohash2.decode(df.iloc[i][1])[1],
            'xx': 1,
            'xa': 2
        }], ignore_index=True)
        df_end = df_end.append([{
            'j2': geohash2.decode(df.iloc[i][2])[0],
            'w2': geohash2.decode(df.iloc[i][2])[1],
            'xx': 1,
            'xa': 2
        }], ignore_index=True)

dx = df_start.groupby(by=[df_start["j1"], df_start["w1"]])
di = dx.count()['xx']
# %% [markdown]
# # Task 1

# %% [markdown]
# ## Task 1.1

# %%
taxi = pd.read_csv('tostudent/taxi_train.csv', parse_dates=[0])
taxi

# %% [markdown]
# ## Task 1.2

# %%
pickup_xy = taxi.apply(
    lambda row: pd.Series(gh.decode(row['pickup_geohash']),
                          dtype='float', index=['pickup_x', 'pickup_y']),
    axis=1)
dropoff_xy = taxi.apply(
    lambda row: pd.Series(gh.decode(row['dropoff_geohash']),
                          dtype='float', index=['dropoff_x', 'dropoff_y']),
    axis=1)
taxi = pd.concat([taxi, pickup_xy], axis=1)
taxi = pd.concat([taxi, dropoff_xy], axis=1)
taxi

# %% [markdown]
# ## Task 1.3