def _add_spire_distance(offer):
    offer_point = (offer['latitude'], offer['longitude'])
    distance = geodesic(offer_point, _spire_point())
    distance = round(distance.kilometers, 2)
    offer['spire_distance_in_km'] = distance
    return offer
def calc_dist(self, latlng1, latlng2):
    # Calculates the geodesic distance between two coordinates.
    return geodesic(latlng1, latlng2)
def point_radial_distance(self, brng, radial):
    return geodesic(kilometers=radial).destination(point=self, bearing=brng)
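# A minimal standalone sketch of the destination-point calculation above,
# assuming plain geopy; `self` in the method is expected to behave like a
# geopy Point. The coordinates here are arbitrary illustration values.
from geopy import Point
from geopy.distance import geodesic

start = Point(52.5200, 13.4050)           # Berlin (lat, lon)
dest = geodesic(kilometers=10).destination(point=start, bearing=90)
print(dest.latitude, dest.longitude)      # a point roughly 10 km due east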
def distance(a, b):
    return geodesic(a, b).ft
def resource_allocation(p, T, time):
    global trade_off, S, E, I, N, B, C, r, z, beta
    # A low trade-off value favors the economic objective; a high value favors
    # the vaccine-allocation (fairness) objective.
    trade_off = 0.95
    how_many = warehouse
    kmeans = KMeans(n_clusters=warehouse, random_state=0).fit(C)
    cluster_center = kmeans.cluster_centers_
    cluster_labels = kmeans.labels_
    label_arr = [[] for alpha in range(warehouse)]
    for i in range(len(cluster_labels)):
        label_arr[cluster_labels[i]].append(i)

    # List of warehouses
    LW = [least_distance_per_cluster(C, label_arr[i]) for i in range(warehouse)]

    # Equally distribute vaccines across warehouse zones
    VW = {}
    current_warehouse = 0
    for f in range(1, T + 1):
        VW[f - 1] = LW[current_warehouse]
        if f % (T / warehouse) == 0:
            current_warehouse += 1

    # Parameters for the optimization
    B = np.array(file['Population Density'].values) * p  # rate of disease spread
    N = np.array(num_agent_per_zone)                     # total population for each zone
    Z_B = [((beta[i] - np.mean(beta)) / np.sum(beta)) + 1.0 for i in range(z)]
    if time <= vaccine_interval:
        r = np.array([0.4 for i in range(z)])
    I = np.array([infected_ratio[t] * num_agent_per_zone[t] for t in range(len(N))])
    E = (N - I) * pe
    S = N - (I + E)
    ir = [I[i] / (N[i] + 0.000001) for i in range(z)]
    Z_I = [((ir[i] - np.mean(ir)) / np.sum(ir)) + 1.0 for i in range(z)]

    # Differential equations -----------------------------------
    from scipy.integrate import odeint
    plt.figure(figsize=(10, 5))
    arry = []
    for i in range(z):
        def model(z0, t):
            global p, alpha, beta, sigma
            dsdt = (-beta[i] * z0[0] * z0[2]) / zone_pop
            dedt = (beta[i] * z0[0] * z0[2]) / zone_pop - (sigma * z0[1])
            didt = sigma * z0[1] - gamma * z0[2]
            drdt = gamma * (1 - alpha) * z0[2]
            dddt = sigma * alpha * z0[2]
            # print(bool(dsdt + dedt + didt + drdt + dddt < 0.01))
            return [dsdt, dedt, didt, drdt, dddt]

        zone_pop = np.array(file['Population'].values)[i]
        temp_I = np.array(file['Total Infected'].values)[i]
        temp_E = pe * (zone_pop - temp_I)
        temp_S = zone_pop - (temp_I + temp_E)
        # initial condition
        z0 = [temp_S, temp_E, temp_I, 0, 0]
        # time points
        t = np.linspace(0, 50)
        final_z = odeint(model, z0, t)
        arry.append(final_z[:, 2])
        # plt.plot(t, final_z[:, 0], 'b-', label='Susceptible')
        # plt.plot(t, final_z[:, 1], 'r-', label='Exposed')
        # plt.plot(t, final_z[:, 2], 'g-', label='Infected', alpha=0.2)
        # plt.plot(t, final_z[:, 3], 'brown', label='Recovered')
        # plt.plot(t, final_z[:, 4], 'black', label='Death')
        # plt.legend()
    # plt.plot(t, np.mean(arry, axis=0), linewidth=3, alpha=1.0, color='red')
    # name = 'sigma_005.png'
    # plt.title(name)
    # plt.tight_layout()
    # plt.savefig(name, dpi=300)
    # plt.show()
    # ----------------------------------------------------------

    model = pulp.LpProblem("Vaccine problem", pulp.LpMinimize)
    X = pulp.LpVariable.dicts(
        "X",
        ((i, j) for i in range(warehouse) for j in range(len(B))),
        lowBound=0.0, upBound=int(T / warehouse), cat='Continuous')
    dist_array = [geodesic(C[VW[j]], C[b]).miles for j in range(T) for b in range(z)]
    max_dist = np.max(dist_array)
    den_economic = float(T * max_dist)
    model += np.sum([X[j, b] * geodesic(C[LW[j]], C[b]).miles
                     for j in range(warehouse) for b in range(z)]) / den_economic

    # Constraint 1 --------------------------------------------------------------------------
    for i in range(warehouse):
        # Condition 1: if all vaccines produced by a warehouse must be assigned
        # model += pulp.lpSum([X[(i, j)] for j in range(len(B))]) == int(T / warehouse)
        # Condition 2: if the number of vaccines should be minimized
        model += pulp.lpSum([X[(i, j)] for j in range(len(B))]) <= int(T / warehouse)

    # Constraint 2 --------------------------------------------------------------------------
    s = 0.0
    for i in range(warehouse):
        s += pulp.lpSum([X[(i, j)] for j in range(len(B))])
    # Condition 1: if all vaccines produced by a warehouse must be assigned
    # model += s == T
    # Condition 2: if the number of vaccines should be minimized
    model += s <= T

    # Constraint 3 (fairness lower bound) ---------------------------------------------------
    another_arr = []
    for i in range(len(B)):
        # Condition 3: base the allocation on the susceptible population
        c = (S[i] - r[i] * pulp.lpSum([X[(j, i)] for j in range(warehouse)])) / sum(S)
        # Condition 4: include population density
        # c *= Z_B[i]
        # Condition 5: include the infected population
        c *= Z_I[i]
        model += pulp.lpSum([X[(j, i)] for j in range(warehouse)]) >= trade_off * c * T

    # Condition 3 only
    # another_arr = [(S[i]) / (max(S)) for i in range(z)]
    # Condition 3 + 4
    # another_arr = [(S[i] * beta[i]) / (max(S) * max(beta)) for i in range(z)]
    # Condition 3 + 4 + 5
    # another_arr = [(S[i] * ir[i] * beta[i]) / (np.max(S) * np.max(ir) * np.max(beta)) for i in range(z)]
    # -----------------------------------------------------------------------------------------

    model.solve()
    print(pulp.LpStatus[model.status])

    # Transfer the pulp decisions into a numpy array (A).
    A = np.zeros((T, len(B)))
    for i in range(warehouse):
        for j in range(len(B)):
            A[i, j] = X[(i, j)].varValue
    global double_arr
    double_arr.append(["Value: " + str(pulp.value(model.objective))])

    vaccines_per_zone = []
    for i in range(len(B)):
        vaccines_per_zone.append(np.sum(A[:, i]))

    # # Output the number of vaccines distributed to each zone
    # print('LW: ', LW)
    # print('Trade-off value: ' + str(trade_off))
    # plt.figure(figsize=(10, 4))
    # plt.bar([i for i in range(z)], vaccines_per_zone)
    # plt.show()
    # ----------------------------------------------------------------------------------------
    '''
    print('\nformula', another_arr)
    print('\nvac', vaccines_per_zone)
    slope, intercept, r_value, p_value, std_err = stats.linregress(vaccines_per_zone, another_arr)
    print('Correlation: ', np.corrcoef(vaccines_per_zone, another_arr)[0, 1])
    print('R-Value: ', r_value)
    plt.figure(figsize=(8, 4))
    plt.plot([0, np.max(vaccines_per_zone)],
             [intercept, (slope * np.max(vaccines_per_zone) + intercept)],
             color='gray', linestyle='--')
    plt.scatter(vaccines_per_zone, another_arr, s=5)
    plt.ylabel('Susceptible, Population Density, and Infected Score', fontsize=8)
    plt.xlabel('Vaccines Per Zone', fontsize=14)
    plt.tight_layout()
    plt.savefig('sus-pop-inf.png', dpi=300)
    plt.show()
    '''
    # ----------------------------------------------------------------------------------------
    return np.array(vaccines_per_zone)
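# A self-contained miniature of the LP pattern used above (minimize transport
# cost subject to per-warehouse supply caps and per-zone lower bounds),
# assuming only pulp; all numbers here are made up for illustration.
import pulp

cost = {(0, 0): 1.0, (0, 1): 3.0, (1, 0): 2.5, (1, 1): 0.5}  # (warehouse, zone) -> cost
supply_cap = 10
demand_floor = [4, 6]

prob = pulp.LpProblem("mini_allocation", pulp.LpMinimize)
x = pulp.LpVariable.dicts("x", list(cost), lowBound=0)
prob += pulp.lpSum(cost[k] * x[k] for k in cost)                       # objective
for w in (0, 1):
    prob += pulp.lpSum(x[(w, zz)] for zz in (0, 1)) <= supply_cap      # supply cap
for zz in (0, 1):
    prob += pulp.lpSum(x[(w, zz)] for w in (0, 1)) >= demand_floor[zz]  # fairness floor
prob.solve()
print(pulp.LpStatus[prob.status], {k: x[k].varValue for k in x})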
print("0", number_of_zero_flux, total_number_of_fluxes, 100 * number_of_zero_flux / total_number_of_fluxes, "%") print(">", number_of_fluxes_over_the_threshold, total_number_of_fluxes, 100 * number_of_fluxes_over_the_threshold / total_number_of_fluxes, "%") print( "Sum:", 100 * amount_of_missing_values / total_number_of_fluxes + 100 * number_of_zero_flux / total_number_of_fluxes + 100 * number_of_fluxes_over_the_threshold / total_number_of_fluxes, "%") distance_list = np.zeros(np.shape(longitude_list)) first_point = (np.mean(lat), longitude_list[0]) for i in range(len(longitude_list)): current_point = (np.mean(lat), longitude_list[i]) distance_list[i] = geo.geodesic(first_point, current_point).km #Distance in km delta_X = thesis.central_differences(distance_list) print("\n\n\n", cruisename, "flux sum:") print("Osborn rolling mean", np.nansum(rolling_mean_Osborn_flux * delta_X), "Osborn profiles", np.nansum(mean_Osborn_flux * delta_X)) print("Shih rolling mean", np.nansum(rolling_mean_Shih_flux * delta_X), "Shih profiles", np.nansum(mean_Shih_flux * delta_X)) print("\n\n\n") np.savetxt( "./" + cruisename + '_fine_flux_results.txt', np.transpose([ longitude_list, distance_list, mean_Osborn_flux, rolling_mean_Osborn_flux, mean_Shih_flux, rolling_mean_Shih_flux
def extract_features(listing, transportation):
    # Pairwise geodesic distances (in miles) between listings and transportation stops.
    distances = cdist(listing, transportation, lambda x, y: geodesic(x, y).miles)
    features = get_distance_features(distances)
    return features, distances
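# A runnable sketch of the cdist/geodesic combination above, assuming SciPy
# and geopy; the coordinates are arbitrary (lat, lon) pairs for illustration.
import numpy as np
from scipy.spatial.distance import cdist
from geopy.distance import geodesic

listings = np.array([[40.7128, -74.0060], [40.7306, -73.9866]])  # (lat, lon)
stations = np.array([[40.7527, -73.9772]])
miles = cdist(listings, stations, lambda x, y: geodesic(x, y).miles)
print(miles)  # shape (2, 1): distance of each listing to the station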
def segment(lati1, longi1, lati2, longi2):
    del seg_Total_elevation[:]
    del seg_Total_distance[:]
    del seg_Total_time[:]
    del seg_min_altitude[:]
    del seg_max_altitude[:]
    del seg_Avg_speed[:]
    del seg_latitude[:]
    del seg_longitude[:]
    del seg_elevation[:]
    del seg_time[:]

    u = len(trck_latitude)
    v = 0
    while v < u:
        latitude = trck_latitude[v]
        longitude = trck_longitude[v]
        Time = trck_Time[v]
        elevation = trck_elevation[v]

        # Find the track points nearest to the two segment endpoints.
        near_lat1 = near_lon1 = near_lat2 = near_lon2 = 1
        p = len(latitude)
        i = 1
        near_dist1 = 10000000
        near_dist2 = 10000000
        point1 = 0
        point2 = 0
        while i < p - 1:
            a = (lati1, longi1)
            b = (lati2, longi2)
            c = (latitude[i], longitude[i])
            dist1 = geodesic(a, c).km
            dist2 = geodesic(b, c).km
            if near_dist1 > dist1:
                near_dist1 = dist1
                near_lat1 = latitude[i]
                near_lon1 = longitude[i]
                point1 = i
            if near_dist2 > dist2:
                near_dist2 = dist2
                near_lat2 = latitude[i]
                near_lon2 = longitude[i]
                point2 = i
            i = i + 1
        print(point1)
        print(point2)
        p1 = min(point1, point2)
        p2 = max(point1, point2)
        print(p1)
        print(p2)

        lati = []
        longi = []
        ele = []
        sec = []
        k = p1
        while k <= p2:
            lati.append(latitude[k])
            longi.append(longitude[k])
            sec.append(Time[k])
            ele.append(elevation[k])
            k = k + 1
        seg_latitude.append(lati)
        seg_longitude.append(longi)
        seg_elevation.append(ele)
        seg_time.append(sec)

        # Plot the segment on Google Maps.
        data = gmplot.GoogleMapPlotter(lati[0], longi[0], 17)
        data.scatter(lati, longi, '#FF0000', size=1, marker=False)
        data.plot(lati, longi, 'cornflowerblue', edge_width=3.0)
        data.draw("templates/part.html")

        n = len(sec)
        td = sec[n - 1] - sec[0]
        Total_time = int(round(td.total_seconds()))
        print(Total_time)
        alt_max = max(ele)
        alt_min = min(ele)
        ele = alt_max - alt_min

        total_dist = 0
        p = len(lati)
        i = 0  # start at the first point so the first leg is not skipped
        while i < p - 1:
            a = (lati[i], longi[i])
            b = (lati[i + 1], longi[i + 1])
            total_dist += geodesic(a, b).km
            i = i + 1
        seg_Total_time.append(int(Total_time / 60))
        seg_Total_elevation.append(ele)
        seg_Total_distance.append(total_dist)
        seg_min_altitude.append(alt_min)
        seg_max_altitude.append(alt_max)
        if Total_time != 0:
            seg_Avg_speed.append((total_dist * 1000) / Total_time)
        else:
            seg_Avg_speed.append(2)
        v = v + 1
from geopy.distance import geodesic
from geopy.distance import great_circle

cds_1 = (-23.421531, -46.527697)
cds_2 = (-23.523583, -46.628641)
destino = "Rua Professor Eldemar Alves de Oliveira, 176"
origem = "Rua Eduardo Chaves, 183"

print(geodesic(cds_1, cds_2))
print(great_circle(cds_1, cds_2))
def trck(file_name):
    del trck_latitude[:]
    del trck_longitude[:]
    del trck_elevation[:]
    del trck_Time[:]
    del files[:]

    # Extract the data from every GPX file.
    for file in os.listdir('gps_data/'):
        latitude = []
        longitude = []
        elevation = []
        Time = []
        x_tag = file[:-4]
        files.append(x_tag)
        name = "gps_data/" + file
        gpx_file = open(name, 'r')
        gpx = gpxpy.parse(gpx_file)
        for track in gpx.tracks:
            for segment in track.segments:
                for point in segment.points:
                    for ex in point.extensions:
                        latitude.append(point.latitude)
                        longitude.append(point.longitude)
                        elevation.append(point.elevation)
                        Time.append(point.time)
        trck_latitude.append(latitude)
        trck_longitude.append(longitude)
        trck_elevation.append(elevation)
        trck_Time.append(Time)

    for file in os.listdir('gps_data/'):
        name = "gps_data/" + file
        os.remove(name)

    # plot = {'Latitude': latitude, 'Longitude': longitude, 'Elevation': elevation, 'Time': Time}
    # df = pd.DataFrame(plot)
    # df.to_csv('file1.csv')

    no_of_trck = len(trck_latitude)
    print(no_of_trck)

    # Plot the GPS data on Google Maps.
    i = 0
    data = gmplot.GoogleMapPlotter(latitude[0], longitude[0], 17)
    while i < no_of_trck:
        latitude = trck_latitude[i]
        longitude = trck_longitude[i]
        elevation = trck_elevation[i]
        Time = trck_Time[i]
        data.scatter(latitude, longitude, '#000000', size=1, marker=False)
        data.plot(latitude, longitude, '%s' % points[i], edge_width=3.0)
        i = i + 1
        if i == no_of_trck:
            data.draw("%s" % file_name)

    j = 0
    del trck_Total_elevation[:]
    del trck_Total_distance[:]
    del trck_Total_time[:]
    del trck_max_altitude[:]
    del trck_min_altitude[:]
    del trck_Avg_speed[:]
    while j < no_of_trck:
        latitude = trck_latitude[j]
        longitude = trck_longitude[j]
        elevation = trck_elevation[j]
        Time = trck_Time[j]
        n = len(Time)
        td = Time[n - 1] - Time[0]
        Total_time = int(round(td.total_seconds()))
        alt_max = max(elevation)
        alt_min = min(elevation)
        ele = alt_max - alt_min
        total_dist = 0
        p = len(latitude)
        i = 0
        while i < p - 1:
            a = (latitude[i], longitude[i])
            b = (latitude[i + 1], longitude[i + 1])
            total_dist += geodesic(a, b).km
            i = i + 1
        total_dist = int(total_dist)
        trck_min_altitude.append(alt_min)
        trck_max_altitude.append(alt_max)
        trck_Total_time.append(int(Total_time / 60))
        trck_Total_distance.append(total_dist)
        trck_Total_elevation.append(ele)
        if Total_time != 0:
            trck_Avg_speed.append((total_dist * 1000) / Total_time)
        else:
            trck_Avg_speed.append(2)
        # print(trck_Total_distance)
        j = j + 1

    s = len(trck_longitude)
    print(s)
def graphanalysis(tracks, attribute):
    total_trck = len(tracks)
    if attribute == "Total_Time":
        y = trck_Total_time
        y_pos = np.arange(len(files))
        print(y_pos)
        plt.bar(y_pos, y, align='center', alpha=0.8, width=0.1)
        plt.xticks(y_pos, files)
        plt.ylabel('Total time (in minutes)')
        plt.title("Total_time vs Tracks")
    elif attribute == "Total_Elevation":
        y = trck_Total_elevation
        y_pos = np.arange(len(files))
        plt.bar(y_pos, y, align='center', alpha=0.8, width=0.1)
        plt.xticks(y_pos, files)
        plt.ylabel('Total elevation (in metres)')
        plt.title("Total_elevation vs Tracks")
    elif attribute == "Average_Speed":
        y = trck_Avg_speed
        y_pos = np.arange(len(files))
        plt.bar(y_pos, y, align='center', alpha=0.8, width=0.1)
        plt.xticks(y_pos, files)
        plt.ylabel('Average_speed (in m/s)')
        plt.title("Average_Speed vs Tracks")
    else:
        i = 0
        while i < total_trck:
            j = tracks[i] - 1
            elevation = trck_elevation[j]
            Time = trck_Time[j]
            latitude = trck_latitude[j]
            longitude = trck_longitude[j]
            distance = []
            time = []
            p = len(elevation)
            k = 0
            # Geodesic distance of every point from the start of the track.
            while k <= p - 1:
                a = (latitude[0], longitude[0])
                b = (latitude[k], longitude[k])
                c = geodesic(a, b).km
                distance.append(c)
                k = k + 1
            i = i + 1
            if attribute == "Elevation":
                plt.plot(distance, elevation, label='%s' % files[i - 1])
            elif attribute == "Time":
                u = len(Time)
                t = 0
                while t <= u - 1:
                    td = Time[t] - Time[0]
                    td = int(round(td.total_seconds() / 60))
                    time.append(td)
                    t = t + 1
                plt.plot(distance, time, label='%s' % files[i - 1])
        plt.xlabel('Distance (in km)')
        if attribute == "Elevation":
            plt.ylabel('Elevation (in metres)')
            plt.title('Distance Vs Elevation')
        elif attribute == "Time":
            plt.ylabel('Time (in minutes)')
            plt.title('Distance Vs Time')
    plt.grid(True)
    plt.legend()
    graph_name = "graph" + str(tm.time()) + ".png"
    for filename in os.listdir('static/'):
        if filename.startswith('graph'):  # do not remove other images
            os.remove('static/' + filename)
    plt.savefig('static/' + graph_name, bbox_inches='tight')
    plt.close()
    name = "static/" + graph_name
    return name
def find_pets_with_cache(pf: Petfinder, location=None, animal_type=None,
                         breed=None, size=None, gender=None, age=None,
                         color=None, coat=None, org_name=None, distance=None,
                         name=None, good_with=[], house_trained=None,
                         special_needs=None, sort=None):
    # If the cache is empty, fill it with a fresh query.
    if cache.get('default') is None:
        pets, _ = find_pets(pf, location, animal_type, breed, size, gender,
                            age, color, coat, org_name, distance, name,
                            good_with, house_trained, special_needs, sort)
        cache.set('default', pets)
    else:
        pets = cache.get('default')

    # Search the cache for the requested characteristics.
    if animal_type is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['animal_type'] == animal_type]
    if breed is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['breeds'] == breed]
    if size is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['size'] == size]
    if gender is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['gender'] == gender]
    if age is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['age'] == age]
    if color is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['colors'] == color]
    if coat is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['coat'] == coat]
    if name is not None and pets.shape[0] >= num_results:
        pets = pets.loc[pets['name'] == name]

    # Location search.
    if location is not None and pets.shape[0] >= num_results:
        curr_location = locator.geocode(location)
        curr_latlong = (curr_location.latitude, curr_location.longitude)
        # Remove results with no lat/long. (The original used
        # `np.isnan(...) is False`, which is always False on a Series and so
        # never filtered anything.)
        pets = pets.loc[pets['contact.address.lat'].notna()
                        & pets['contact.address.long'].notna()]
        # geodesic() works on one pair of points at a time, so compute the
        # distance row by row.
        pets['new'] = 0
        for index in pets.index:
            pets.loc[index, 'new'] = geodesic(
                curr_latlong,
                (pets.loc[index, 'contact.address.lat'],
                 pets.loc[index, 'contact.address.long'])).miles
        pets = pets.loc[pets['new'] < distance]
        del pets['new']

    # If there are not enough results in the cache, run the query and cache the results.
    if pets.shape[0] < num_results:
        pets, _ = find_pets(pf, location, animal_type, breed, size, gender,
                            age, color, coat, org_name, distance, name,
                            good_with, house_trained, special_needs, sort)
        temp = cache.get('default')
        if 'primary_photo_cropped' in temp.columns:
            del temp['primary_photo_cropped']
        # DataFrame.append is deprecated; concatenate instead (assumes pandas
        # is imported as pd).
        pets = pd.concat([pets, temp], ignore_index=True)
        pets = pets.drop_duplicates(
            subset=['animal_type', 'gender', 'age', 'coat', 'name'])
        cache.set('default', pets)
    else:
        # If there are more results than wanted, keep only the top rows. (The
        # original called drop() without reassigning, so the loop never ended.)
        pets = pets.iloc[:num_results]
    return pets
# Return the indices of the top-k grid cells, then look up their positions in
# the grid table and average them.
top_k_index = calcTopKInex(tdoa, grid_numpy)
# top_k_index = calcTop_1_Inex(tdoa, grid_numpy)
# print("top-k indices:")
# print(top_k_index)

# Print the five nearest grid cells.
print("******************************************")
print("Current aircraft position and TDOA:", lat, lon, tdoa)
print("******************************************")
print("Predicted top-5 grid cells and their details:")
for x in top_k_index:
    print("Grid cell index:", x)
    print("Grid cell position and TDOA:", grid[x][1:3], grid[x][3:6])
    tdoa_eula = np.linalg.norm(grid_numpy[x] - tdoa)
    print("Euclidean distance between the grid cell's TDOA and the aircraft's TDOA:", tdoa_eula)
    error = geodesic((lat, lon), (grid[x][1], grid[x][2])).m
    print("Distance between the grid cell and the aircraft's true position: %.2f m" % error)
    print("************************************************")

# Compute the predicted position from the top-k indices.
predictLatitude, predictLongitude = calcLocationEstimation(top_k_index, grid)
print("Predicted position:")
print(predictLatitude, predictLongitude)

# Compute the error.
error = geodesic((lat, lon), (predictLatitude, predictLongitude)).m
ind = list(dataframe['hostnames']).index(src_node)
src_lat = list(dataframe['latitude'])[ind]
src_lon = list(dataframe['longitude'])[ind]
# print("src")
# print(src_lat, src_lon)
# print(ind)

ind = list(dataframe1['hostnames']).index(dst_node)
dst_lat = list(dataframe1['latitude'])[ind]
dst_lon = list(dataframe1['longitude'])[ind]
# print("dst")
# print(dst_lat, dst_lon)
# print(ind, dst_node)
# print(ind, "", dst_lat, "", dst_lon)

distance = geodesic([src_lat, src_lon], [dst_lat, dst_lon]).kilometers

s_lat.append(src_lat)
s_lon.append(src_lon)
d_lat.append(dst_lat)
d_lon.append(dst_lon)
dist.append(distance)

nodes_dist['src_lat'] = s_lat
nodes_dist['src_lon'] = s_lon
nodes_dist['dst_lat'] = d_lat
nodes_dist['dst_lon'] = d_lon
nodes_dist['distance'] = dist

# %%
regr = LinearRegression()
def bike_data(self, results, user_lat, user_lng, bike_type, action):
    contents = []
    num = 0
    message_contents = []
    for each in results:
        num += 1
        name_tw = each["name_tw"].replace("(", " (")
        available_spaces = str(each["available_spaces"])
        empty_spaces = str(each["empty_spaces"])
        lng = str(each["loc"][0])
        lat = str(each["loc"][1])
        updated_at = str(each["updated_at"]).replace("-", "/")
        if bike_type == 1:
            name_tw = "(1.0) " + name_tw
        else:
            name_tw = "(2.0) " + name_tw
        origin_point = (user_lat, user_lng)
        dist_point = (lat, lng)
        distance = str(round(geodesic(origin_point, dist_point).meters, 2))

        # The same Flex Message bubble serves both branches; only the
        # availability check differs ("borrow" needs a free bike, otherwise an
        # empty dock is needed). Note: these are Python dicts, so booleans must
        # be True, not JSON's true.
        bubble = {
            "type": "bubble",
            "hero": {
                "type": "image",
                "url": "https://i.imgur.com/4vK8avw.png",
                "size": "full",
                "aspectRatio": "1.7:1",
                "aspectMode": "cover",
                "action": {"type": "uri", "uri": "http://linecorp.com/"},
            },
            "body": {
                "type": "box",
                "layout": "vertical",
                "contents": [
                    {
                        "type": "text",
                        "text": name_tw,
                        "weight": "bold",
                        "align": "center",
                        "wrap": True,
                        "size": "xl",
                    },
                    {"type": "separator", "margin": "sm"},
                    {
                        "type": "box",
                        "layout": "horizontal",
                        "contents": [
                            {
                                "type": "box",
                                "layout": "vertical",
                                "contents": [
                                    {
                                        "type": "text",
                                        "text": "可借車位",  # bikes available to borrow
                                        "wrap": True,
                                        "align": "center",
                                        "size": "xl",
                                    },
                                    {
                                        "type": "text",
                                        "text": available_spaces,
                                        "align": "center",
                                        "wrap": True,
                                        "size": "3xl",
                                        "weight": "bold",
                                        "color": "#00FF00",
                                    },
                                ],
                            },
                            {
                                "type": "box",
                                "layout": "vertical",
                                "contents": [
                                    {
                                        "type": "text",
                                        "text": "可還車位",  # docks available for return
                                        "align": "center",
                                        "wrap": True,
                                        "size": "xl",
                                    },
                                    {
                                        "type": "text",
                                        "text": empty_spaces,
                                        "weight": "bold",
                                        "size": "3xl",
                                        "wrap": True,
                                        "align": "center",
                                        "color": "#FF0000",
                                    },
                                ],
                            },
                        ],
                        "margin": "sm",
                    },
                    {
                        "type": "text",
                        "text": "和您距離:{}公尺".format(distance),  # distance from you, in metres
                        "wrap": True,
                        "align": "center",
                    },
                    {"type": "separator", "margin": "md"},
                    {
                        "type": "text",
                        "text": "更新時間:{}".format(updated_at),  # last updated
                        "margin": "md",
                        "align": "center",
                    },
                    {
                        "type": "button",
                        "action": {
                            "type": "postback",
                            "label": "查看路線",  # view route
                            "data": "route_{},{},{},{}".format(
                                user_lat, user_lng, lat, lng
                            ),
                        },
                        "style": "primary",
                        "margin": "md",
                        "color": "#4A89F3",
                    },
                ],
            },
        }
        if action == "borrow":
            if int(available_spaces) > 0:
                contents.append(bubble)
        else:
            if int(empty_spaces) > 0:
                contents.append(bubble)

    # LINE carousels hold at most 10 bubbles, and at most 5 messages can be
    # sent at once.
    while contents:
        temp = contents[:10]
        flex_message = FlexSendMessage(
            alt_text="腳踏車列表",  # bike list
            contents={"type": "carousel", "contents": temp}
        )
        message_contents.append(flex_message)
        contents = contents[10:]
    if len(message_contents) > 5:
        message_contents = message_contents[:5]
    return message_contents
""" ## 停车点处理 ### 得出停车点 LATITUDE范围 bike_fence['MIN_LATITUDE'] = bike_fence['FENCE_LOC'].apply(lambda x: np.min(x[:, 1])) bike_fence['MAX_LATITUDE'] = bike_fence['FENCE_LOC'].apply(lambda x: np.max(x[:, 1])) ### 得到停车点 LONGITUDE范围 bike_fence['MIN_LONGITUDE'] = bike_fence['FENCE_LOC'].apply(lambda x: np.min(x[:, 0])) bike_fence['MAX_LONGITUDE'] = bike_fence['FENCE_LOC'].apply(lambda x: np.max(x[:, 0])) from geopy.distance import geodesic ### 根据停车点范围计算具体的面积 bike_fence['FENCE_AREA'] = bike_fence.apply( lambda x: geodesic( (x['MIN_LATITUDE'], x['MIN_LONGITUDE'], x['MAX_LATITUDE'], x['MAX_LONGITUDE']) ).meters, axis=1 ) ### 根据停车点 计算中心经纬度 bike_fence['FENCE_CENTER'] = bike_fence['FENCE_LOC'].apply( lambda x: np.mean(x[:-1, ::-1], 0) ) # Geohash经纬度匹配 # -----------------------------
def distance_travelled(coords):
    # coords = (start_lat, start_lon, end_lat, end_lon)
    return geodesic((coords[0], coords[1]), (coords[2], coords[3])).km
def update_lightning_strikes():
    """Get the latest data from FMI and update the states."""
    _LOGGER.debug("FMI: Lightning started")
    loc_time_list = []
    home_cords = (self.latitude, self.longitude)

    start_time = datetime.today() - timedelta(days=LIGHTNING_DAYS_LIMIT)

    # Format the datetime as the string accepted as a path parameter by the REST API.
    start_time = str(start_time).split(".")[0]
    start_time = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
    start_time_uri_param = f"starttime={str(start_time.date())}T{str(start_time.time())}Z&"

    # Get the bounding-box coordinates.
    bbox_coords = get_bounding_box(self.latitude, self.longitude,
                                   half_side_in_km=BOUNDING_BOX_HALF_SIDE_KM)
    bbox_uri_param = f"bbox={bbox_coords.lon_min},{bbox_coords.lat_min},{bbox_coords.lon_max},{bbox_coords.lat_max}&"
    base_url = BASE_URL + start_time_uri_param + bbox_uri_param
    _LOGGER.debug(f"FMI: Lightning URI - {base_url}")

    # Fetch the data.
    response = requests.get(base_url, timeout=TIMEOUT_LIGHTNING_PULL_IN_SECS)
    root = ET.fromstring(response.content)
    # time.time() is in seconds, so the timeout must not be scaled by 1000.
    loop_timeout = time.time() + LIGHTNING_LOOP_TIMEOUT_IN_SECS
    for child in root.iter():
        if child.tag.find("positions") > 0:
            clean_text = child.text.lstrip()
            val_list = clean_text.split("\n")
            num_locs = 0
            for loc_indx, val in enumerate(val_list):
                if val != "":
                    val_split = val.split(" ")
                    lightning_coords = (float(val_split[0]), float(val_split[1]))
                    distance = 0
                    try:
                        distance = geodesic(lightning_coords, home_cords).km
                    except Exception:
                        _LOGGER.info(f"Unable to find distance between {lightning_coords} and {home_cords}")
                    add_tuple = (val_split[0], val_split[1], val_split[2], distance, loc_indx)
                    loc_time_list.append(add_tuple)
                    num_locs += 1
                    if time.time() > loop_timeout:
                        break
        elif child.tag.find("doubleOrNilReasonTupleList") > 0:
            clean_text = child.text.lstrip()
            val_list = clean_text.split("\n")
            for indx, val in enumerate(val_list):
                if val != "":
                    val_split = val.split(" ")
                    exist_tuple = loc_time_list[indx]
                    if indx == exist_tuple[4]:
                        add_tuple = (exist_tuple[0], exist_tuple[1], exist_tuple[2],
                                     exist_tuple[3], val_split[0], val_split[1],
                                     val_split[2], val_split[3])
                        loc_time_list[indx] = add_tuple
                        if time.time() > loop_timeout:
                            break
                    else:
                        print("Record mismatch - aborting query!")
                        break

    # First sort by distance (closest entries first) and trim to the limit.
    loc_time_list = sorted(loc_time_list, key=lambda item: item[3])  # distance
    _LOGGER.debug(f"FMI - Coords retrieved for Lightning Data - {len(loc_time_list)}")
    loc_time_list = loc_time_list[:LIGHTNING_LIMIT]

    # Second sort by date, most recent first.
    loc_time_list = sorted(loc_time_list, key=lambda item: item[2], reverse=True)  # date

    geolocator = Nominatim(user_agent="fmi_hassio_sensor")  # reverse geocoding
    loop_start_time = datetime.now()
    op_tuples = []
    for indx, v in enumerate(loc_time_list):
        loc = str(v[0]) + ", " + str(v[1])
        loc_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(v[2])))
        try:
            location = geolocator.reverse(loc, language="en").address
        except Exception as e:
            _LOGGER.info(f"Unable to reverse geocode for address-{loc}. Got error-{e}")
            location = loc
        # Time, Location, Distance, Strikes, Peak Current, Cloud Cover, Ellipse Major
        op = FMILightningStruct(time_val=loc_time, location=location, distance=v[3],
                                strikes=v[4], peak_current=v[5], cloud_cover=v[6],
                                ellipse_major=v[7])
        op_tuples.append(op)
    loop_end_time = datetime.now()
    self.lightning_data = op_tuples
    _LOGGER.debug("FMI: Lightning ended")
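# A hedged sketch of what a helper like the get_bounding_box used above might
# look like, built on geopy's destination calculation. The BBox container and
# its field names mirror the attributes accessed above but are assumptions here,
# not the integration's actual implementation.
from collections import namedtuple
from geopy.distance import geodesic

BBox = namedtuple("BBox", ["lat_min", "lon_min", "lat_max", "lon_max"])

def get_bounding_box(latitude, longitude, half_side_in_km):
    center = (latitude, longitude)
    north = geodesic(kilometers=half_side_in_km).destination(center, bearing=0)
    south = geodesic(kilometers=half_side_in_km).destination(center, bearing=180)
    east = geodesic(kilometers=half_side_in_km).destination(center, bearing=90)
    west = geodesic(kilometers=half_side_in_km).destination(center, bearing=270)
    return BBox(lat_min=south.latitude, lon_min=west.longitude,
                lat_max=north.latitude, lon_max=east.longitude)

print(get_bounding_box(60.17, 24.94, half_side_in_km=25))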
def findDistanceBetween(latA, lngA, latB, lngB):
    return geodesic((latA, lngA), (latB, lngB)).km
def product_details_from_id(self, product_id):
    query = {'verbosity': 'full'}
    response = requests.get(
        f'https://hackzurich-api.migros.ch/products/{product_id}',
        params=query,
        auth=HTTPBasicAuth('hackzurich2020', 'uhSyJ08KexKn4ZFS'))
    original_product = response.json()

    # Product name
    print(original_product)
    original_product_name = original_product['name']

    # Nutrients
    nutrients = {}
    for nutrient in original_product['nutrition_facts']['standard']['nutrients']:
        if nutrient['name'] == 'Energie':
            nutrients['energy'] = nutrient['quantity']
        elif nutrient['name'] == 'davon Zucker':
            nutrients['sugars'] = nutrient['quantity']
        elif nutrient['name'] == 'davon gesättigte Fettsäuren':
            nutrients['saturated_fat'] = nutrient['quantity']
        elif nutrient['name'] == 'Salz':
            nutrients['sodium'] = nutrient['quantity']
    original_product_nutri_score = simplified_nutriscore(nutrients)

    # Origin: distance from the (last matching) country of origin to Switzerland.
    origin_string = '-'.join(original_product['origins'].values())
    for country_name in self.country_names_german:
        if country_name.lower() in origin_string.lower():
            location_origin_str = country_name
    location_origin = self.geolocator.geocode(location_origin_str)
    location_ch = self.geolocator.geocode("Schweiz")
    original_product_origin_distance = geodesic(
        (location_origin.latitude, location_origin.longitude),
        (location_ch.latitude, location_ch.longitude)).kilometers

    # Category
    original_product_category = original_product['categories'][0]['code']
    # Rating
    original_product_rating = original_product['ratings']['average_all']
    # Price
    original_product_price = original_product['price']['item']['price']
    # original_product_base_price = original_product['price']['base']['price']
    # Quantity/unit
    original_product_quantity = original_product['price']['item']['quantity']
    original_product_unit = original_product['price']['item']['unit']
    quantity_string = original_product['price']['item']['display_quantity']
    original_product_display_quantity = self.parse_quantity(quantity_string)

    # Is a sustainability label available?
    original_product_label = False
    if 'labels' in original_product:
        original_product_label = original_product['labels'][0] in [
            "CO2", "L02", "L03", "L04", "L05", "L06", "L07", "L09", "L10",
            "L14", "L16", "L17", "L28", "L29", "L33", "L34", "L35", "L36",
            "L38", "L41", "L42", "L43", "L44", "L45", "L46", "L55", "L56",
            "L57", "L59", "L60", "L62", "L64", "L65", "L67", "L68", "L69",
            "L71", "TIW"
        ]

    # Product picture URL
    original_product_picture_url = original_product['image']['original']

    return {
        'product_id': product_id,
        'product_name': original_product_name,
        'origin_distance_km': original_product_origin_distance,
        'has_label': original_product_label,
        'product_category': original_product_category,
        'customer_rating': original_product_rating,
        'nutri_score': original_product_nutri_score,
        'price': original_product_price,
        'picture_url': original_product_picture_url,
        'quantity': original_product_quantity,
        'display_quantity': quantity_string,
        'base_quantity': original_product_display_quantity['quantity'],
        'base_unit': original_product_display_quantity['unit'],
        'base_price': original_product_price / original_product_display_quantity['quantity'],
        'unit': original_product_unit,
    }
def get_distance(p1, p2):
    '''Return the geodesic distance between two points in whole metres.'''
    print(f"The two points are {int(geodesic(p1, p2).m)} metres apart!\n")
    return int(geodesic(p1, p2).m)
import csv
import datetime

import pandas as pd
from geopy.distance import geodesic

# Straight-line distance between two coordinates, in metres and in kilometres.
print(geodesic((30.28708, 120.12802999999997), (28.7427, 115.86572000000001)).m)
print(geodesic((30.28708, 120.12802999999997), (28.7427, 115.86572000000001)).km)

csv_file = csv.reader(open('滴滴数据.csv', 'r'))  # DiDi ride data
id = []
distance = []
time_all = []
speed = []
starttime = []
for line in csv_file:
    print(line)
    try:
        userid = line[0]
        p1 = str(line[2])
        p2 = str(line[3])
        # Approximate the trip as a north-south leg plus an east-west leg
        # ("Manhattan" style): hold the longitude fixed for the latitude
        # difference and vice versa.
        a1 = (float(p1.split(",")[1]), 122)
        a2 = (float(p2.split(",")[1]), 122)
        a3 = (0, float(p1.split(",")[0]))
        a4 = (0, float(p2.split(",")[0]))
        p_distance = geodesic(a1, a2).km + geodesic(a3, a4).km
        time1 = line[6]
        time2 = line[7]
        datetime1 = datetime.datetime.strptime(time1, '%Y/%m/%d %H:%M')
        datetime2 = datetime.datetime.strptime(time2, '%Y/%m/%d %H:%M')
        time = (datetime2 - datetime1).seconds / 60
        v = float(p_distance) / (float(time) + 0.1) * 60
    except Exception:
        continue  # skip malformed rows (the original snippet is truncated here)
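# A quick sanity check of the two-leg approximation above against the direct
# geodesic distance, using the same pair of coordinates; here the two-leg sum
# comes out longer than the direct line, as expected for a detour-style route.
from geopy.distance import geodesic

p1 = (30.28708, 120.12803)
p2 = (28.7427, 115.86572)
direct = geodesic(p1, p2).km
two_leg = (geodesic((p1[0], 122), (p2[0], 122)).km
           + geodesic((0, p1[1]), (0, p2[1])).km)
print(direct, two_leg)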
def vzdalenost(self, odkud, kam):
    """
    Return the distance between two points in kilometres.

    The arguments odkud (from) and kam (to) are each a pair of GPS coordinates.
    """
    return geodesic(odkud, kam).km
def in_radius(self, cords, radius):
    # geodesic expects (latitude, longitude) ordering; the original passed
    # (longitude, latitude) for the first point.
    return geodesic((self.latitude, self.longitude), cords).km < radius
def distance_function(a, b):
    return distance.geodesic(a, b).meters
lyft_url = "http://www.lyft.com" ebay_url = "https://www.ebay.com/sch/i.html?_from=R40&_trksid=m570.l1313&_nkw=" for each_item in craigs_main_posts: item_url = each_item.attrs["href"] craigs_resp = requestwrap.err_web(item_url) craigs_soup = BeautifulSoup(craigs_resp.text, "html.parser") googurl = craigs_soup.find("a", href=mapsre) try: lat, lon, _ = googurl.attrs["href"].split("@")[1].split("z")[0].split( ",") except AttributeError: print(f"{each_item.text} was likely deleted") pass miles = geodesic((start, end), (lat, lon)).miles ebay_path = ( f"{each_item.text}&_sacat=0&LH_TitleDesc=0&_osacat=0&_odkw={each_item.text}" ) ebay_query_url = ebay_url + ebay_path ebay_resp = requestwrap.err_web(ebay_query_url) ebay_soup = BeautifulSoup(ebay_resp.text, "html.parser") item = ebay_soup.find("h3", { "class": "s-item__title" }).get_text(separator=" ") price = ebay_soup.find("span", {"class": "s-item__price"}).get_text() print(f'"{each_item.text}" is Free on Craigslist, is selling for {price}' f" on Ebay and is {miles:.2f} miles away from you.")
def get_distance(self, points, point):
    (la1, lo1) = points[point[0]]
    (la2, lo2) = points[point[1]]
    # geodesic expects (latitude, longitude); the original passed (lo, la).
    return geodesic((la1, lo1), (la2, lo2)).kilometers
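# Argument order matters: geopy interprets each pair as (latitude, longitude),
# so swapping the axes silently yields a different, wrong distance. A small check:
from geopy.distance import geodesic

paris = (48.8566, 2.3522)    # (lat, lon)
berlin = (52.5200, 13.4050)
print(geodesic(paris, berlin).km)              # ~878 km, correct
print(geodesic(paris[::-1], berlin[::-1]).km)  # swapped axes, wrong result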
def dis():
    c = (48.94916729374633, 2.712422235848561)
    p = (48.961171845281726, 2.6660100518582475)
    return geodesic(c, p).km * 1000  # metres (equivalently geodesic(c, p).m)
def export_final_data(paths, extrapolate=False, extrapolation_interval_in_secs=None):
    final_data = []
    for entry in paths:
        json_path = entry[0]
        id = entry[1]
        with open(json_path) as data:
            json_data = json.load(data)
        for loc_dict in json_data['timelineObjects']:
            if 'placeVisit' in loc_dict.keys():
                static_dict = loc_dict['placeVisit']
                start_time = int(static_dict['duration']['startTimestampMs'])
                end_time = int(static_dict['duration']['endTimestampMs'])
                coordinates = []
                # coordinates holds tuples like ((lat, long), confidence); the
                # central point gets confidence 1, the others confidence/100.
                if 'centerLatE7' in static_dict.keys():
                    coordinates.append(
                        ((static_dict['centerLatE7'] / 1e7,
                          static_dict['centerLngE7'] / 1e7), 100))
                loc = static_dict['location']
                if 'latitudeE7' in loc.keys():
                    coordinates.append(((loc['latitudeE7'] / 1e7,
                                         loc['longitudeE7'] / 1e7),
                                        loc['locationConfidence']))
                if 'otherCandidateLocations' in static_dict.keys():
                    for other_loc_dict in static_dict['otherCandidateLocations']:
                        coordinates.append(
                            ((other_loc_dict['latitudeE7'] / 1e7,
                              other_loc_dict['longitudeE7'] / 1e7),
                             other_loc_dict['locationConfidence']))
                # Now create the entry for these coordinates.
                create_entry(coordinates, start_time, end_time, id, final_data)

                # Some JSON files have childVisits too; process them with the
                # same logic as above.
                if 'childVisits' in static_dict.keys():
                    for child_dict in static_dict['childVisits']:  # renamed from `dict` (shadowed the builtin)
                        child_start_time = int(child_dict['duration']['startTimestampMs'])
                        child_end_time = int(child_dict['duration']['endTimestampMs'])
                        child_coordinates = []
                        if 'centerLatE7' in child_dict.keys():
                            child_coordinates.append(
                                ((child_dict['centerLatE7'] / 1e7,
                                  child_dict['centerLngE7'] / 1e7), 100))
                        loc = child_dict['location']
                        child_coordinates.append(
                            ((loc['latitudeE7'] / 1e7, loc['longitudeE7'] / 1e7),
                             loc['locationConfidence']))
                        if 'otherCandidateLocations' in child_dict.keys():
                            # The original iterated static_dict here, which was
                            # a bug: the guard checks the child visit's own candidates.
                            for child_other_loc_dict in child_dict['otherCandidateLocations']:
                                child_coordinates.append(
                                    ((child_other_loc_dict['latitudeE7'] / 1e7,
                                      child_other_loc_dict['longitudeE7'] / 1e7),
                                     child_other_loc_dict['locationConfidence']))
                        create_entry(child_coordinates, child_start_time,
                                     child_end_time, id, final_data)
                # We now have the data for one static location point:
                # start_time and end_time give the recorded interval, the
                # coordinates carry their confidences (plus a central
                # coordinate with confidence 1), and each coordinate is
                # processed individually for that same duration.
            elif 'activitySegment' in loc_dict.keys():
                dynamic_dict = loc_dict['activitySegment']
                dynamic_start_time = int(dynamic_dict['duration']['startTimestampMs'])
                dynamic_end_time = int(dynamic_dict['duration']['endTimestampMs'])
                path = []
                confidence = -100 * bell_func(0.5)
                # Path points use the same format, but carry negative confidences.
                path.append(
                    ((dynamic_dict['startLocation']['latitudeE7'] / 1e7,
                      dynamic_dict['startLocation']['longitudeE7'] / 1e7),
                     confidence))
                if 'waypointPath' in dynamic_dict.keys():
                    for waypoint in dynamic_dict['waypointPath']['waypoints']:
                        if 'latE7' in waypoint.keys():
                            confidence = -100 * bell_func(-2)
                            path.append(((waypoint['latE7'] / 1e7,
                                          waypoint['lngE7'] / 1e7), confidence))
                        else:
                            confidence = -100 * bell_func(-2)
                            path.append(((waypoint['latitudeE7'] / 1e7,
                                          waypoint['longitudeE7'] / 1e7), confidence))
                confidence = -100 * bell_func(0.5)
                path.append(
                    ((dynamic_dict['endLocation']['latitudeE7'] / 1e7,
                      dynamic_dict['endLocation']['longitudeE7'] / 1e7),
                     confidence))

                # If the flag is passed, extrapolate: connect the path from
                # start point to end point and generate intermediate points at
                # the requested interval.
                if extrapolate:
                    path_len = path_length(path)
                    journey_time = (dynamic_end_time - dynamic_start_time) / 1000
                    speed = path_len / journey_time
                    new_points = []
                    for i in range(len(path) - 1):
                        interval_dist = speed * extrapolation_interval_in_secs
                        start = path[i][0]
                        end = path[i + 1][0]
                        segmented = False
                        print("segment", i + 1)
                        while not segmented:
                            # Segment the interval between path[i] and path[i + 1].
                            # print((start[1], start[0]), (end[1], end[0]))
                            dist = geodesic((start[1], start[0]),
                                            (end[1], end[0])).meters
                            if dist:
                                # Parameter t defines the equation of the line:
                                # the distance scales differ, so geodesic and
                                # (d1**2 + d2**2)**0.5 would not agree, and the
                                # latitude/longitude might otherwise overflow.
                                param_t = interval_dist / dist
                            else:
                                segmented = True
                                continue
                            if 0 < param_t < 1:
                                gen_lat = (1 - param_t) * start[0] + param_t * end[0]
                                gen_long = (1 - param_t) * start[1] + param_t * end[1]
                                print(gen_lat, gen_long)
                                confidence = -100 * bell_func(-8)
                                new_points.append(((gen_lat, gen_long), confidence))
                                start = (gen_lat, gen_long)
                                continue
                            else:
                                segmented = True
                                continue
                    path += new_points
                create_entry(path, dynamic_start_time, dynamic_end_time, id, final_data)
    return final_data
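# A compact, standalone sketch of the segmentation idea above: walk from one
# point toward the next in fixed geodesic steps using linear interpolation in
# lat/lon space (an approximation that is reasonable for short hops).
from geopy.distance import geodesic

def interpolate_points(start, end, step_m):
    """Yield intermediate (lat, lon) points spaced roughly step_m apart."""
    points = []
    current = start
    while True:
        dist = geodesic(current, end).meters
        if dist <= step_m:
            return points
        t = step_m / dist
        current = ((1 - t) * current[0] + t * end[0],
                   (1 - t) * current[1] + t * end[1])
        points.append(current)

print(interpolate_points((52.52, 13.40), (52.53, 13.42), step_m=250))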
plane_tdoa_int = float2Int(plane_tdoa)
enc_plane_tdoa_int = vhe.encrypt_distance(EncTDOA_M, plane_tdoa_int)

# Compute the encrypted Euclidean distance between the encrypted aircraft TDOA
# and each grid cell's TDOA, and take the indices of the top-5 closest cells.
top_k_index = calcEncTopKInex(enc_plane_tdoa_int, EncTDOA, EncTDOA_H)

# Using those indices, accumulate the corresponding coordinates from Coor.
encSumCoor = calcEncLocationEstimationSum(top_k_index, EncCOOR)

# Decrypt the summed encrypted position.
decSumCoor = vhe.decrypt(EncCOOR_S, encSumCoor)

# After decryption, undo the scaling factor and take the mean.
predictCoor = decSumCoor / (5 * S2NS)

# Convert to LLH coordinates.
predictLLH = ecef2llh(predictCoor)
error_llh = geodesic((lat, lon), (predictLLH[0], predictLLH[1])).m
print("Error between the predicted LLH position and the aircraft's LLH position: %.2f m" % error_llh)
error.append(error_llh)
predictError = predictError.append([{
    'id': id,
    'latitude': lat,
    "longitude": lon,
    'height': height,
    'predictLatitude': predictLLH[0],
    'predictLongitude': predictLLH[1],
    'predictHeight': predictLLH[2],
    'error': error_llh
}])
if (index + 1) % 10 == 0:
    print("Predicted %d messages; progress: %.2f%%"
          % (index + 1, ((index + 1) / len(messages)) * 100))