def find_weighted_direction(self):
    """Find the inverse-distance-weighted average of the azimuths to the
    two nearest roads.  Used by the "weighted" algorithm.

    :return: weighted direction in degrees.  NOTE(review): after the
        180-degree flip below the value can exceed 360; confirm callers
        normalise it if they need [0, 360).
    """
    sql = self.base_sql.format(**{
        'lat': self.geo.latitude,
        'lon': self.geo.longitude,
        'limit': 2,
    })
    result = db.engine.execute(sql)
    distance = []
    direction = []
    # Expect exactly two rows (limit=2): nearest and second-nearest road.
    for row in result:
        distance.append(round(row['distance'], 1))
        direction.append(round(row['azimuth'], 1))
    # Inverse-distance weighting: each azimuth is multiplied by the OTHER
    # road's distance, so the nearer road dominates the average.
    new_dir = (direction[0] * distance[1] + direction[1] * distance[0]) / \
              (distance[0] + distance[1])
    if abs(direction[0] - direction[1]) < 180:
        # NOTE(review): flips the result when the two azimuths lie on the
        # same half-circle -- presumably to point away from the road;
        # confirm the intent before relying on this.
        new_dir += 180.0
    print("new dir = " + str(new_dir))
    return new_dir
def GPStoKM(GPS_data):
    """Convert a track of GPS fixes to per-step geodesic distances in km.

    :param GPS_data: array-like of shape (N, 2), rows are [latitude,
        longitude] (assumed from the column indexing -- TODO confirm).
    :return: np.array of N-1 distances (km) between consecutive fixes;
        empty array for fewer than two fixes.
    """
    distances = []
    # Walk consecutive pairs directly.  The original computed the trivial
    # point-to-itself distance for index 0 and then called
    # `distance.remove(0)`, which deletes the first *value* equal to 0 --
    # that only worked because the spurious element happened to be first.
    for idx in range(1, len(GPS_data)):
        prev_point = (GPS_data[idx - 1, 0], GPS_data[idx - 1, 1])
        curr_point = (GPS_data[idx, 0], GPS_data[idx, 1])
        distances.append(geopy.distance.distance(curr_point, prev_point).km)
    return np.array(distances)
def calculate_distance_to_each_sensor():
    """For every unique observation location, compute the geodesic distance
    to each mask point and bucket the mask column names into 1-km distance
    rings (0-1 km, 1-2 km, ..., 99-100 km), then persist the table.

    Reads  : merged_obs_sim_features_ref_plus_dist.csv, ef_feature_info.csv
    Writes : obs_mask_distance.csv (all under obs_merge_path)
    """
    coll_name = os.path.join(
        obs_merge_path,
        os.path.basename('merged_obs_sim_features_ref_plus_dist.csv'))
    supp_name = os.path.join(obs_merge_path,
                             os.path.basename('ef_feature_info.csv'))
    coll_save_name = os.path.join(obs_merge_path,
                                  os.path.basename('obs_mask_distance.csv'))
    supp_data = pd.read_csv(supp_name)
    mask_lons = supp_data.loc[:, 'mask_lon']
    mask_lats = supp_data.loc[:, 'mask_lat']
    data = pd.read_csv(coll_name)
    # Unique (lon, lat) observation locations with their row counts.
    locations = data.groupby(
        ['lon', 'lat']).size().reset_index().rename(columns={0: 'count'})
    dis_thres = np.arange(100)  # lower bounds of the 1-km rings: 0..99
    # (The original also assigned supp_data['column_name'].values to
    # new_cols here and immediately overwrote it -- dead code, removed.)
    new_cols = ['ef_dist_{:02}'.format(i + 1) for i in dis_thres]
    new_feature_distance = []
    for index, row in locations.iterrows():
        print('Working on Row {}'.format(index))
        obs_lon = row['lon']
        obs_lat = row['lat']
        # Distance (km) from this observation to every mask point.
        distance = [
            geopy.distance.distance((lat, lon), (obs_lat, obs_lon)).km
            for lon, lat in zip(mask_lons, mask_lats)
        ]
        new_feature_distance.append(distance)
        fea_select = []
        for thres in dis_thres:
            # Mask points strictly inside the (thres, thres + 1) km ring.
            column_indx = [
                i for i in range(len(distance))
                if distance[i] > thres and distance[i] < thres + 1
            ]
            this_em_diff_names = supp_data.loc[column_indx,
                                               'column_name'].values
            fea_select.append(this_em_diff_names)
        # One array of qualifying column names per ring column.
        locations.loc[index, new_cols] = fea_select
    locations.to_csv(coll_save_name)
# Keep only rows whose coordinates are physically valid.
df = df[(df["pickup_longitude"] >= -180)]
df = df[(df["pickup_longitude"] <= 180)]
df = df[(df["pickup_latitude"] >= -90)]
df = df[(df["pickup_latitude"] <= 90)]
df = df[(df["dropoff_longitude"] >= -180)]
df = df[(df["dropoff_longitude"] <= 180)]
df = df[(df["dropoff_latitude"] >= -90)]
df = df[(df["dropoff_latitude"] <= 90)]
df.index = [k for k in range(df.shape[0])]

# Coordinates to distances (requires geopy).
distance = []
for k in range(df.shape[0]):
    coords_pickup = (df['pickup_latitude'][k], df['pickup_longitude'][k])
    coords_dropoff = (df['dropoff_latitude'][k], df['dropoff_longitude'][k])
    # FIX: geopy.distance.vincenty was deprecated and removed in geopy 2.0;
    # geodesic is the drop-in (and more robust) replacement.
    distance.append(geopy.distance.geodesic(coords_pickup, coords_dropoff).km)


def truncate(f, n):
    """Truncate (not round) the float f to n decimal places, as a string."""
    s = '{}'.format(f)
    if 'e' in s or 'E' in s:
        # Scientific notation: fall back to fixed-point formatting.
        return '{0:.{1}f}'.format(f, n)
    i, p, d = s.partition('.')
    return '.'.join([i, (d + '0' * n)[:n]])


for i in range(len(distance)):
    distance[i] = float(truncate(distance[i], 3))
df["distance"] = distance
# Drop implausible trips: too short, too long, or near-zero fares.
df = df[df["distance"] > 0.1]
df = df[df["distance"] <= 100]
df = df[df['fare_amount'] > 1]
print(finish)
# Build a lon/lat/alt DataFrame from the parsed GPX track points.
df = pd.DataFrame(columns=['lon', 'lat', 'alt'])
for point in data:
    df = df.append({'lon': point.longitude, 'lat' : point.latitude, 'alt' : point.elevation}, ignore_index=True)
# get parameters from gpx
import geopy.distance
distance = [];
elevation = [];
# Per-segment distance (miles) and elevation delta between consecutive points.
# NOTE(review): geopy.distance.vincenty was removed in geopy 2.0 -- this
# needs geopy < 2.0, or a switch to geopy.distance.geodesic.
for i in range(len(df)-1):
    coords_1 = [df['lat'][i], df['lon'][i]]
    coords_2 = [df['lat'][i+1], df['lon'][i+1]]
    distance.append(geopy.distance.vincenty(coords_1, coords_2).miles)
    elevation.append( df['alt'][i+1]-df['alt'][i])
total_distance = np.sum(distance)
elevation = np.array(elevation)
# Total ascent (positive deltas) and descent (negative deltas).
gain = np.sum(elevation[np.where( elevation > 0 )])
loss = np.sum(elevation[np.where( elevation < 0 )])
# Elevation units -> feet (3.28084 ft per metre; assumes GPX elevations
# are metres -- TODO confirm).
gain_ft = gain*3.28084
loss_ft = loss*3.28084
# Track centroid and altitude extremes.
longitude = df['lon'].mean()
latitude = df['lat'].mean()
high = df['alt'].max()
low = df['alt'].min()
# Feature dict for the model.  NOTE(review): the literal continues beyond
# this excerpt -- truncated here.
X_test = {'length' : total_distance, 'ascent' : gain_ft, 'descent' : loss_ft,
# URL = 'http://google.com/search?q=distance from '+a+' to '+b # content = requests.get(URL) # soup = BeautifulSoup(content.text, 'html.parser') # contentTable =soup.find('div',{"class": "BNeawe deIvCb AP7Wnd"}) # z=re.findall("\((.+)\)",contentTable.get_text()) # if(len(z)!=0): # k=z[0].split()[0] # else: # k=0 if str(a) + "-" + str(b) in existing_dist: sub_distance.append(distance_json[str(a) + "-" + str(b)]) elif str(b) + "-" + str(a) in existing_dist: sub_distance.append(distance_json[str(b) + "-" + str(a)]) else: sub_distance.append(0.0) distance.append(sub_distance) with open('data.json', 'w') as f: json.dump(json_data, f, indent=4) f.close() a = dict_p[source] k = df.iloc[a, 1:3] coords_1 = (k["long"], k["lati"]) import sys class Graph(): global coords_1
def get_user_loaction(request):
    """Render the fire-risk page for the requesting user.

    Three entry modes, keyed on POST fields:
      * 'lat'/'lon'  -- reverse-geocode the coordinates, pull nearby fires
        and an air-quality reading, and render 'LOW' or 'High' risk.
      * 'location'   -- "city,country" string; render 'Undefined' with the
        economic-damage estimate for that place.
      * otherwise    -- render the page with the aggregate data only.
    """
    fire_ec = fire_economic_data.objects.all()
    damage = ""
    area = ""
    city = ""
    country = ""
    city_country_list = []
    count = 0
    # After the loop, damage/area/city/country hold the LAST record's values.
    for r in fire_ec:
        damage = r.details
        area = float(r.area_expected)
        city = r.city
        country = r.country
        city_country_list.append(r.city + "," + r.country)
    count = fire_info.objects.filter(city=city, country=country).count() + 1
    area = round(area * count, 2)
    #print("Burning : "+str(count+1)+"km")
    if 'lat' in request.POST:
        lat = request.POST['lat']
        lon = request.POST['lon']
        url_ = "http://api.airpollutionapi.com/1.0/aqi?lat=" + lat + "&lon=" + lon + "&APPID=oki0j2l7pq9h5p5pnvfh613ml7"
        r = requests.get(url=url_)
        data = r.json()
        # CO index above 200 is treated as bad air quality.
        co_index_bad = False
        if float(data['data']["aqiParams"][4]['aqi']) > 200:
            co_index_bad = True
        co_index_data = data['data']["alert"]
        #co_index_data = "dummy"
        coordinates = [(lat, lon)]
        coords_1 = (lat, lon)
        #print(coordinates)
        result = rg.search(coordinates)
        low = True
        user_city = result[0]['city']
        user_country = result[0]['country']
        fire_inf = fire_info.objects.filter(city=user_city,
                                            country=user_country)
        distance = []
        if len(fire_inf) != 0:
            for d in fire_inf:
                coords_2 = (d.latitude, d.longitude)
                distance.append(
                    round(geopy.distance.geodesic(coords_1, coords_2).km, 2))
            # NOTE(review): max(distance) < 10 means *every* known fire is
            # within 10 km; if the intent is "nearest fire within 10 km",
            # this should be min(distance) -- confirm before changing.
            if max(distance) < 10 or co_index_bad == True:
                low = False
            else:
                low = True
        context = {
            'damage': damage,
            'area': area,
            'city': city,
            'country': country,
            'city_country_list': city_country_list,
            'count': count,
            'co_index_data': co_index_data,
        }
        if low == True:
            #return render(request,"fire_main/index.html",{'response':'LOW'})
            context['response'] = 'LOW'
        else:
            context['response'] = 'High'
        return render(request, "fire_main/index.html", context)
    elif 'location' in request.POST:
        data = request.POST['location'].split(",")
        count = fire_info.objects.filter(city=data[0],
                                         country=data[1]).count() + 1
        fire = fire_economic_data.objects.filter(city=data[0],
                                                 country=data[1])
        city = data[0]
        # FIX: was `country = data[0]` -- the country is the second
        # component of the "city,country" string (as the filters above use).
        country = data[1]
        print(count)
        for f in fire:
            damage = f.details
            area = float(f.area_expected)
        area = round(area * count, 2)
        return render(
            request, "fire_main/index.html", {
                'response': 'Undefined',
                'damage': damage,
                'area': area,
                'city': city,
                'country': country,
                'city_country_list': city_country_list,
                'count': count
            })
    else:
        return render(request, "fire_main/index.html", {
            'damage': damage,
            'area': area,
            'city': city,
            'country': country
        })
def _embed_map_url(lat, lon):
    """Return an OpenStreetMap embed URL centred on (lat, lon): a marker
    inside a +/-0.010-degree bounding box, rendered with the mapnik layer.
    (FIX: the first map previously requested layer=mapquest, which the OSM
    embed no longer serves; all three maps now use mapnik.)"""
    left = lon - 0.010
    bottom = lat - 0.010
    right = lon + 0.010
    top = lat + 0.010
    return (
        "https://www.openstreetmap.org/export/embed.html?"
        f"bbox={left}%2C{bottom}%2C{right}%2C{top}"
        f"&layer=mapnik&marker={lat}%2C{lon}"
    )


def results():
    """Handle the results-form POST: validate the requested date and
    location, find the three nearest sites whose forecast flow at time t
    lies strictly between 100 and 400, and render their maps and numbers.

    NOTE(review): raises IndexError (unhandled, as in the original) when
    fewer than three qualifying sites exist; consider a friendly error.
    Returns None for non-POST requests (also preserved).
    """
    if request.method == 'POST':
        input_location = request.form.get('location')
        input_date = request.form.get('date')
        t = dateparser.parse(request.form.get('date'))
        if t < datetime.datetime.now():
            return reload_after_error("Whoops, looks like you chose a time that's already happened!")
        if t > datetime.datetime.now() + datetime.timedelta(days=365):
            return reload_after_error("Whoops, looks like you chose a time that's too far in the future.")
        location = geocode_location(input_location)
        if location[0] is None:
            return reload_after_error("We can't find that location on the map. Please try again.")
        # Rough Colorado bounding box (lat 36.97-40.95, lon -109.03..-102).
        if (location[0] > 40.95 or location[0] < 36.97 or
                location[1] < -109.03 or location[1] > -102.00):
            return reload_after_error("That location isn't in Colorado! Please try again.")
        sql_query = """ SELECT * FROM site_locations; """
        query_results = pd.read_sql_query(sql_query, con)
        site_no = query_results["site_no"]
        site_lat = query_results["dec_lat_va"]
        site_long = query_results["dec_long_va"]
        sites_coord = pd.DataFrame([site_no, site_lat, site_long]).T
        # Distance (miles) from the requested location to every site.
        distance = []
        for i in range(len(sites_coord)):
            distance.append(
                geopy.distance.distance(location,
                                        sites_coord.iloc[i, 1:]).miles)
        query_results["distance"] = distance
        site_no = pd.DataFrame(site_no)
        site_no["distance"] = distance
        query_results = query_results.sort_values(by=["distance"])
        site_no = site_no.sort_values(by=["distance"])
        # Walk sites nearest-first; keep up to three whose forecast flow at
        # time t is in the desirable (100, 400) range.
        count = 0
        loc_lat = list()
        loc_lon = list()
        flow = list()
        flow_upper = list()
        flow_lower = list()
        good_site = list()
        good_site_nm = list()
        good_dist = list()
        for i in range(len(site_no)):
            # NOTE(review): the site number is interpolated into the table
            # name; values come from our own site_locations table, but
            # parameterized SQL would still be safer.
            sql_query_model = """ SELECT * FROM n""" + site_no['site_no'].iloc[i] + """_forecast; """
            query_results_model = pd.read_sql_query(sql_query_model, con)
            if (t == query_results_model['ds']).any():
                temp = query_results_model.loc[query_results_model['ds'] == t]
                if (temp['yhat_rescaled'].iloc[0] > 100 and
                        temp['yhat_rescaled'].iloc[0] < 400):
                    loc_lat.append(float(query_results[i:i + 1]["dec_lat_va"]))
                    loc_lon.append(float(query_results[i:i + 1]["dec_long_va"]))
                    good_site.append(query_results[i:i + 1]["site_no"].iloc[0])
                    good_site_nm.append(query_results[i:i + 1]["station_nm"].iloc[0])
                    good_dist.append(query_results[i:i + 1]["distance"].iloc[0])
                    flow.append(temp['yhat_rescaled'].iloc[0])
                    flow_upper.append(temp['yhat_upper_rescaled'].iloc[0])
                    flow_lower.append(temp['yhat_lower_rescaled'].iloc[0])
                    count = count + 1
                    if count == 3:
                        break
        # Per-site display values.  (The original also built unused
        # location_1/2/3 DataFrames -- location_2 mistakenly from loc1's
        # coordinates -- and unused lat/lon locals; all removed.)
        loc1_lat = loc_lat[0]
        loc1_lon = loc_lon[0]
        loc1_name = good_site_nm[0]
        loc1_dist = round(good_dist[0])
        loc1_flow = round(flow[0])
        loc1_flow_up = round(flow_upper[0])
        map_url_1 = _embed_map_url(loc1_lat, loc1_lon)
        loc2_lat = loc_lat[1]
        loc2_lon = loc_lon[1]
        loc2_name = good_site_nm[1]
        loc2_dist = round(good_dist[1])
        loc2_flow = round(flow[1])
        loc2_flow_up = round(flow_upper[1])
        map_url_2 = _embed_map_url(loc2_lat, loc2_lon)
        loc3_lat = loc_lat[2]
        loc3_lon = loc_lon[2]
        loc3_name = good_site_nm[2]
        loc3_dist = round(good_dist[2])
        loc3_flow = round(flow[2])
        loc3_flow_up = round(flow_upper[2])
        map_url_3 = _embed_map_url(loc3_lat, loc3_lon)
        return render_template('results.html',
                               location=input_location,
                               date=input_date,
                               map_url_1=map_url_1,
                               map_url_2=map_url_2,
                               map_url_3=map_url_3,
                               loc1_name=loc1_name,
                               loc2_name=loc2_name,
                               loc3_name=loc3_name,
                               loc1_dist=loc1_dist,
                               loc2_dist=loc2_dist,
                               loc3_dist=loc3_dist,
                               loc1_flow=loc1_flow,
                               loc2_flow=loc2_flow,
                               loc3_flow=loc3_flow,
                               loc1_flow_up=loc1_flow_up,
                               loc2_flow_up=loc2_flow_up,
                               loc3_flow_up=loc3_flow_up)