def create_and_save_global_maps(stations_data):
    """Render two time-animated heatmaps of bike-station data and save them as HTML.

    m1 weights each station by absolute bikes available; m2 weights it by the
    bikes/capacity ratio.  Records are grouped into animation frames by their
    HH:MM timestamp (input is assumed to be in time order).
    """
    map_options = dict(location=[43.6531661, -79.394812], zoom_start=13,
                       tiles='Stamen Toner', prefer_canvas=True,
                       detect_retina=True)
    m1 = folium.Map(**map_options)
    m2 = folium.Map(**map_options)

    times = []                  # one 'HH:MM' label per frame
    stations = []               # per frame: [[lat, lon, bikes], ...]
    stations_by_capacity = []   # per frame: [[lat, lon, bikes/capacity], ...]
    for time_station in stations_data:
        if time_station['bikes_available'] == 0:
            # Zero weights are invisible on the heatmap; bump to 1 (mutates input dict).
            time_station['bikes_available'] = 1
        lat = time_station['lat']
        lon = time_station['lon']
        bikes = time_station['bikes_available']
        station = [lat, lon, bikes]
        station_by_capacity = [lat, lon, bikes / time_station['capacity']]
        # UTC to EST/EDT -5h.  NOTE(review): the result also goes through
        # time.localtime, so it additionally depends on the host timezone — confirm.
        unix_timestamp = time_station['timestamp'] - 60 * 60 * 5
        timestamp = time.strftime('%H:%M', time.localtime(unix_timestamp))
        if times and timestamp == times[-1]:
            # Same minute as the previous record: extend the current frame.
            stations[-1].append(station)
            stations_by_capacity[-1].append(station_by_capacity)
        else:
            # New minute: start a new frame (outer lists stay index-aligned with times).
            times.append(timestamp)
            stations.append([station])
            stations_by_capacity.append([station_by_capacity])

    m1.add_child(plugins.HeatMapWithTime(stations, times, radius=30,
                                         use_local_extrema=True))
    m2.add_child(plugins.HeatMapWithTime(stations_by_capacity, times, radius=30))

    current_dir = os.path.dirname(os.path.abspath(__file__))
    m1.save(os.path.join(current_dir, '../heatmap/templates/heatmap/m1.html'))
    m2.save(os.path.join(current_dir, '../heatmap/templates/heatmap/m2.html'))
def plotMapOverTime(df):
    """Animate hourly end-of-trip density near the city centre and save it.

    Args:
        df: trip dataframe with 'End_Lat', 'End_Lng', 'End_Distance' and
            'Hour' columns.

    Side effects:
        Writes the animated map to "Maps/HeatMap.html".
    """
    # Keep only trips ending within 2.5 km of the centre.
    df_centre = df[df['End_Distance'] < 2.5]
    df_plot = df_centre[['End_Lat', 'End_Lng']].round(5)
    df_plot['Hour'] = df_centre['Hour']

    m = folium.Map([51.4545, -2.58], zoom_start=13)

    # One frame per hour of day.  BUG FIX: the original used range(0, 23),
    # which silently dropped hour 23; range(24) covers the full day.
    bike_data = [[[row['End_Lat'], row['End_Lng']]
                  for index, row in df_plot[df_plot['Hour'] == i].iterrows()]
                 for i in range(24)]

    hm = plugins.HeatMapWithTime(bike_data, auto_play=True, max_opacity=0.8,
                                 radius=15)
    hm.add_to(m)
    m.save("Maps/HeatMap.html")
def mapear_prediccion():
    """Build an animated heatmap of predicted incidents and save it as HTML.

    Reads the module-level `prediction` dataframe (columns 'latitud',
    'longitud', 'fecha_hechos'), assigns each row its own animation frame,
    and writes 'mapa_final.html' into ../Images.
    """
    os.chdir("../Images")
    # One frame per prediction row, keyed by a sequential weight index.
    prediction['weight'] = list(range(len(prediction)))

    mapa_final = folium.Map(location=[19.443056, -99.144444], zoom_start=15)

    geo = []
    for frame_id in range(len(prediction)):
        frame_rows = prediction[prediction['weight'] == frame_id]
        geo.append([[fila['latitud'], fila['longitud']]
                    for _, fila in frame_rows.iterrows()])

    index = ['{:%Y-%m-%d %H-%M-%S}'.format(ts) for ts in prediction.fecha_hechos]

    capa = plugins.HeatMapWithTime(geo, index=index, radius=20, auto_play=True,
                                   max_opacity=0.8, name='Robo a transeunte')
    capa.add_to(mapa_final)
    mapa_final.save('mapa_final.html')
def test_beijing_traffic(): loc_downleft = [39.7938, 116.2033] # latitude,longitude loc_upright = [40.0403, 116.5358] row = 32 col = 32 print("#################area split######################") location = area_split(loc_downleft, loc_upright, row, col) # print("location\n",location) print("location[0,0,:] = \n", location[0, 0, :]) print("location.shape ", location.shape) print("#################read small h5######################") # h5 data shape = (48, 32, 32) = (step, h, w) frname = "small.h5" print("reading ", frname) fr = h5py.File(frname, 'r') print("fr.keys() = ", fr.keys()) data = fr['small_data'][()] fr.close() print("data.shape = ", data.shape) print("data.dtype = ", data.dtype) print("max data = ", np.max(data)) print("min data = ", np.min(data)) data = (data - np.min(data)) / (np.max(data) - np.min(data)) # print ("data = ",data) print("data[0,0,:] = \n", data[0, :, 0]) print("#################array reshape######################") # move_data shape (48, 1024, 3) (step, samples, data) # data shape 3 = (lat,lon,val) # 转换过程:h5 data(48, 32, 32) to (48,32*32,3),3 =(lat,lon,val) data = data.reshape((48, 32 * 32, 1), order='C') # location = location.reshape((32 * 32, 2), order='C') # # print("location\n",location) temp = np.insert(data, 0, location[:, 1], axis=2) move_data = np.insert(temp, 0, location[:, 0], axis=2) print("move_data.shape ", move_data.shape) print("move_data[0,0:3,:] = \n", move_data[0, 0:3, :]) data2 = move_data[20:40, ] print("data2.shape ", data2.shape) # print ("data2[0,0:33,:] \n",data2[0,0:33,:]) data2 = data2.tolist() m = folium.Map( [39.9, 116.35], zoom_start=11 ) # zoom_start small or big tiles='stamentoner' #data1 data2 # m = folium.Map([35, 110], zoom_start=5)#zoom_start small or big tiles='stamentoner' #data3 hm = plugins.HeatMapWithTime(data2, radius=22) # data1 data2 # hm = plugins.HeatMapWithTime(data3,radius=8) # data3 hm.add_to(m) save_path = os.path.join(out_dir, "test_beijing.html") # 保存为html文件 m.save(save_path) 
print(save_path, "has been saved!") # 默认浏览器打开 webbrowser.open(save_path)
def main():
    """Pull yearly cumulative approved PV installs by premise location and
    save them as an animated heatmap.

    Relies on module-level configuration: start_year, end_year, dot_size,
    color_gradient and out_file_name.
    """
    years = [i for i in range(start_year, end_year + 1)]
    d = {}
    # SQL connections
    cs = r'DSN=ed2d;Trusted_Connection=True;'
    cnxn = pyodbc.connect(cs)
    # get data: one dataframe per year, each holding every install approved up
    # to and including that year (cumulative), grouped by premise lat/long.
    for year in years:
        qry = """ SELECT year=%s, prm.id_latitude lat, prm.id_longitude long, sum(cga.app_sys_size_kw) capacity FROM ed2tcga_cust_gen_app cga INNER JOIN ed2tprm_premise prm on prm.prem_id = cga.prem_id WHERE current_app_status_desc = 'APPROVED' and gen_type_code = 'PV' and year(cga.app_approved_date) <= %s GROUP BY year(cga.app_approved_date), prm.id_latitude, prm.id_longitude """ % (year, year)
        df_temp = pd.read_sql_query(qry, cnxn)
        d[year] = df_temp
    df = pd.concat(d.values(), ignore_index=True).dropna()
    year_index = df.year.unique().tolist()
    # format data: folium needs plain python lists, one [lat, lon] frame per year.
    # NOTE(review): assumes every year in range appears in `df`, so that
    # year_index and heat_data stay the same length — confirm.
    heat_data = [[[row['lat'], row['long']] for index, row in df[df['year'] == i].iterrows()] for i in range(start_year, end_year + 1)]
    # create map
    mp = folium.Map(location=[32.89, -117], zoom_start=10)
    hm = plugins.HeatMapWithTime(heat_data, auto_play=True, max_opacity=0.9, min_opacity=0.2, radius=dot_size, index=year_index, gradient=color_gradient)
    # save map
    hm.add_to(mp)
    mp.save(out_file_name)
def plot_in_folium(location_df, agent_df, total_days):
    """Plot camps/conflict zones plus a per-day agent heatmap over Mali.

    Camps are drawn as green circle markers, conflict zones as red ones, and
    the agents' positions are animated one frame per simulated day.  The map
    is written to <output_dir>/mali_map_all_agents.html.
    """
    # Base map centred on Mali.
    plot_map = folium.Map(location=[16.3700359, -2.2900239], zoom_start=6)

    # Coordinates arrive as strings; work in floats.
    location_df['geo_lat'] = location_df['geo_lat'].astype(float)
    location_df['geo_lon'] = location_df['geo_lon'].astype(float)

    camp_conflict_df = location_df.loc[
        location_df['location_type'].isin(['camp', 'conflict_zone']),
        ['#name', 'geo_lat', 'geo_lon', 'location_type']]
    # Green for camps, red for conflict zones.
    camp_conflict_df['marker_color'] = [
        '#00C957' if kind == 'camp' else '#f9424b'
        for kind in camp_conflict_df['location_type']
    ]

    for _, place in camp_conflict_df.iterrows():
        folium.CircleMarker(
            location=[place['geo_lat'], place['geo_lon']],
            popup=place['#name'],
            radius=20,
            color=place['marker_color'],
            fill=True,
            fill_color=place['marker_color'],
        ).add_to(plot_map)

    agent_df['geo_lat'] = agent_df['geo_lat'].astype(float)
    agent_df['geo_lon'] = agent_df['geo_lon'].astype(float)

    # folium wants plain lists, not a dataframe: one list of [lat, lon]
    # pairs per simulated day.
    first_day = int(agent_df['day'].min())
    last_day = int(agent_df['day'].max())
    agent_coordinates = []
    for day in range(first_day, last_day + 1):
        day_rows = agent_df[agent_df['day'] == day]
        agent_coordinates.append([[r['geo_lat'], r['geo_lon']]
                                  for _, r in day_rows.iterrows()])

    plugins.HeatMapWithTime(agent_coordinates).add_to(plot_map)

    output_path = Path(output_dir).joinpath('mali_map_all_agents.html')
    plot_map.save(output_path)
    print(f'Map generated in: {output_path}')
def create_map(data, map_name):
    '''
    Build a Thursday scooter-density animation and save it as HTML.

    data - str: the file path to the csv
    map_name - str: name of the file you would like to save map html in
    '''
    df = pd.read_csv(data)
    df.drop(['code', 'captive', 'battery_level', 'location_group'],
            axis=1, inplace=True)
    df['time_group_seconds'] = (pd.to_datetime(df['time_group']) -
                                datetime.datetime(1970, 1, 1)).dt.total_seconds()
    df['date'] = pd.to_datetime(df['time_group']).dt.date
    df = add_rounded_time(df)
    df = drop_repeated_data(df)
    df = add_lat_long(df)
    df['count'] = 1
    df['grid_location'] = 0
    df = add_day_of_week(df)
    df = add_rounded_time(df)  # NOTE(review): second call — confirm it is intentional

    thurs_df = df[df['day_of_week'] == 'Thursday'].copy()
    thurs_df['latlong'] = list(map(list, zip(thurs_df['latitude'],
                                             thurs_df['longitude'])))

    # One frame of [lat, lon] pairs per distinct time_of_day value,
    # in order of first appearance.
    latlon_list = []
    for slot in thurs_df['time_of_day'].unique():
        latlon_list.append(
            thurs_df[thurs_df['time_of_day'] == slot]['latlong'].tolist())

    # Rotate left by 21 slots so the animation starts at the 4:00am slot
    # (replaces the original pop/append loop).
    latlon_list = latlon_list[21:] + latlon_list[:21]

    def _time_label(slot):
        """12-hour clock label for the slot-th 15-minute step after 4:00am."""
        hour24, minute = divmod(4 * 60 + 15 * slot, 60)
        suffix = 'am' if hour24 < 12 else 'pm'
        hour12 = hour24 % 12 or 12
        return '{}:{:02d}{}'.format(hour12, minute, suffix)

    # Frame labels with live scooter counts.  Replaces the original
    # hand-written 69-entry list (4:00am .. 9:00pm) with identical generated
    # strings; also stays consistent if the number of frames ever changes.
    index_list = ['{}, {} scooters'.format(_time_label(i), len(frame))
                  for i, frame in enumerate(latlon_list)]

    m = folium.Map([37.8044, -122.2711], tiles='stamentoner', zoom_start=13)
    hm = plugins.HeatMapWithTime(latlon_list, index=index_list)
    hm.add_to(m)
    m.save(map_name)
def heatfromdoc(doc):
    """Animate bike routes over the Cologne street network as a heatmap.

    Args:
        doc: iterable of route records, each with a 'starttime' and a
            'route' (list of OSM node ids).

    Side effects:
        Downloads/caches the Cologne bike network via osmnx and writes the
        animated map to 'index.html'.
    """
    # Unused `import networkx as nx` from the original removed.
    import osmnx as ox
    from folium import Map
    import folium.plugins as plugins

    ox.config(use_cache=True, log_console=True)
    G = ox.graph_from_place('Cologne', network_type='bike')
    gdf_nodes, gdf_edges = ox.graph_to_gdfs(G)

    timestamps = sorted(row["starttime"] for row in doc)

    datapoints = []  # frames: lists of [lat, lon, weight]
    timeindex = []   # one label per emitted frame
    points = []
    cnt = 0
    for ts in timestamps:
        for route in doc:
            # BUG FIX: the original bare `except:` swallowed every error,
            # including KeyboardInterrupt/SystemExit.  Only the KeyError from
            # missing record keys or nodes absent from the graph is expected.
            try:
                if route["starttime"] == ts:
                    for node in route["route"]:
                        nodepoint = gdf_nodes.loc[node]
                        points.append([nodepoint["y"], nodepoint["x"], 1])
            except KeyError:
                continue
        # Flush the accumulated points as a frame roughly every 6 timestamps.
        if cnt == 6:
            cnt = 0
        if points != [] and cnt == 0:
            datapoints.append(points)
            timeindex.append(str(ts))
            points = []
        cnt += 1

    m = Map([50.9287107, 6.9459497], tiles="cartodbpositron", zoom_start=13)
    hm = plugins.HeatMapWithTime(datapoints, index=timeindex, auto_play=True,
                                 max_opacity=0.5, radius=8,
                                 use_local_extrema=True)
    hm.add_to(m)
    m.save('index.html')
def get_plot(self, parameters):
    """Render a month-by-month event heatmap and return it as embeddable HTML.

    Events are clipped to the 1st-99th percentile lat/lon box, counts are
    log-scaled and normalised to [0, 1], and one animation frame is built per
    MonthYear value.
    """
    qe = QueryExecutor()
    df = qe.get_result_dataframe(self._build_qeury(parameters))

    min_lon, max_lon = df.lon.quantile([0.01, 0.99])
    delta_lon = max_lon - min_lon
    min_lat, max_lat = df.lat.quantile([0.01, 0.99])
    delta_lat = max_lat - min_lat
    center = ((min_lat + max_lat) / 2, (min_lon + max_lon) / 2)

    # Drop outliers outside the percentile box.
    df = df[(df.lon >= min_lon) & (df.lon <= max_lon) &
            (df.lat >= min_lat) & (df.lat <= max_lat)]
    # Log-scale then normalise counts into [0, 1] heat weights.
    df['EventCount'] = log(df['EventCount'])
    df['EventCount'] = df['EventCount'] / df['EventCount'].max()

    timeline = []
    months = list(sorted(df['MonthYear'].unique()))
    for month in months:
        month_counts = df[df['MonthYear'] == month][[
            'lat', 'lon', 'EventCount'
        ]].values
        timeline.append(list(map(list, month_counts)))

    m = folium.Map(center, tiles='stamentoner', control_scale=True,
                   height="75%")
    # BUG FIX: fit_bounds expects [[south_lat, west_lon], [north_lat,
    # east_lon]]; the original passed [[lon, lon], [lat, lat]].
    m.fit_bounds([
        [min_lat - 0.1 * delta_lat, min_lon - 0.1 * delta_lon],
        [max_lat + 0.1 * delta_lat, max_lon + 0.1 * delta_lon],
    ])

    months = Utils().format_months_names(months)
    hm = plugins.HeatMapWithTime(
        data=timeline,
        index=months,
        name="heatmap",
        radius=0.3,
        scale_radius=True,
        overlay=True,
    )
    hm.add_to(m)
    m.render()
    # Swap quotes so the HTML can be embedded in a double-quoted attribute.
    return m._repr_html_().replace('"', "'")
def plot(heatmap_data, idx, init_center):
    """Build an animated heatmap with extra tile layers and measuring tools.

    Args:
        heatmap_data: list of frames, each a list of [lat, lon] points.
        idx: frame labels for the time slider.
        init_center: [lat, lon] the map opens on.

    Returns:
        The assembled folium.Map.
    """
    fmap = folium.Map(location=init_center, tiles='CartoDB Positron',
                      zoom_start=3, control_scale=True)

    # Animated heat layer.
    heat_layer = plugins.HeatMapWithTime(data=heatmap_data, auto_play=True,
                                         max_opacity=0.8, index=idx,
                                         radius=25, name='Target')
    fmap.add_child(heat_layer)

    # Alternative base layers selectable from the layer control.
    for tile_name in ('Open Street Map', 'Stamen Terrain', 'Stamen Toner'):
        folium.raster_layers.TileLayer(tile_name).add_to(fmap)

    # Click-for-coordinates popup, distance measuring tool, layer switcher.
    fmap.add_child(folium.LatLngPopup())
    fmap.add_child(plugins.MeasureControl(primary_length_unit='kilometers'))
    fmap.add_child(folium.LayerControl())
    return fmap
def initmap():
    """Build and save the car-park occupancy heatmap.

    Reads data/locations.csv, normalises 'Occupancy Percentage' into a [0, 1]
    weight, animates one frame per distinct datetime, overlays the car-park
    GeoJSON, and writes templates/map.html.
    """
    df = pd.read_csv('data/locations.csv', sep=',')
    df.head()
    df['Occupancy Percentage'] = pd.to_numeric(df['Occupancy Percentage'],
                                               errors='coerce')
    # Heat weights must be fractions, not percentages.
    df['norm_occupancy'] = df['Occupancy Percentage'].apply(lambda x: x / 100)
    df = df.dropna()

    unique = df['datetime'].unique()
    # One frame of [lat, lon, weight] triples per timestamp.
    list_data = []
    for timestamp in unique:
        frame_rows = df.loc[df['datetime'] == timestamp]
        list_data.append([[r['latitude'], r['longitude'], r['norm_occupancy']]
                          for _, r in frame_rows.iterrows()])

    parking = folium.Map(location=[50.7260218, -1.8827525], zoom_start=13,
                         tiles="stamentoner")
    hm = plugins.HeatMapWithTime(list_data, index=list(unique),
                                 auto_play=False, radius=40, max_opacity=0.8)
    hm.layer_name = 'Heatmap'
    parking.add_child(hm)
    parking.add_child(plugins.MiniMap())

    # Load GeoJson overlay of the car parks.
    json14 = os.path.join('data', 'carparkmap.json')
    folium.GeoJson(json14, name='geojson').add_to(parking)
    folium.LayerControl().add_to(parking)
    parking.save(outfile='templates/map.html')
def generate_animation(epochs=10, *args, **kwargs):
    """Serve an HTML animation of location history split into equal epochs.

    Args:
        epochs: number of animation frames to split the history into.

    Returns:
        A Flask Response with the rendered map HTML.
    """
    location_data = load()
    # Drop low-accuracy fixes before preparing the data.
    location_data = location_data[location_data.accuracy < 1000]
    location_data = prepare(location_data)

    centre = [location_data.latitude.median(), location_data.longitude.median()]
    m = folium.Map(centre, zoom_start=9)

    # Split chronologically into `epochs` chunks; one frame per chunk.
    chunks = np.array_split(location_data, int(epochs))
    heat_data = []
    for chunk in chunks:
        heat_data.append([[rec['latitude'], rec['longitude']]
                          for _, rec in chunk.iterrows()])

    plugins.HeatMapWithTime(heat_data, auto_play=True,
                            max_opacity=0.8).add_to(m)
    m.add_child(folium.LatLngPopup())

    fn = os.getcwd() + '/index.html'
    m.save(fn)
    return Response(get_file(fn), mimetype="text/html")
def heat_map_with_time(df, to_from, by, data_index, save):
    """Save an animated heatmap of trips bucketed by `by`.

    Args:
        df: trip dataframe containing '<to_from>Latitude',
            '<to_from>Longitude' and `by` columns.
        to_from: 'to' or 'from' — which end of the trip to plot.
        by: column whose values define the animation frames.
        data_index: ordered frame keys (one animation step per value).
        save: output HTML path.
    """
    map_metro = folium.Map(location=[45.5122, -122.6587], zoom_start=11,
                           max_zoom=14, tiles='Stamen', attr='Toner')

    lat_col = '{}Latitude'.format(to_from)
    lon_col = '{}Longitude'.format(to_from)
    # Column selection returns a fresh frame, so dropna cannot warn about /
    # mutate a view of the caller's dataframe (original used dropna(inplace=True)
    # on a slice).
    heat_df = df[[lat_col, lon_col, by]].dropna()

    # BUG FIX: the original hard-coded 'fromLatitude'/'fromLongitude' here,
    # breaking every call with to_from='to'; use the requested columns.
    heat_array_time = [[[row[lat_col], row[lon_col]]
                        for index, row in heat_df[heat_df[by] == i].iterrows()]
                       for i in data_index]

    # `pickup` is a module-level index of timestamps; strip the time part for
    # cleaner slider labels.
    if data_index == pickup:
        idx = [str(i).split(' ')[0] for i in data_index]
    else:
        idx = data_index

    hmt = plugins.HeatMapWithTime(heat_array_time, index=idx, auto_play=False,
                                  radius=10)
    hmt.add_to(map_metro)
    map_metro.save(save)
def upload():
    """Flask view: accept an uploaded CSV of dated lat/lon points and render
    an animated heatmap, one frame per day since the earliest date.

    Returns:
        On POST with a file: the max day offset as a string (after writing
        static/map.html).  Otherwise: the upload form.
    """
    if request.method == 'POST' and 'data' in request.files:
        filename = loc_data.save(request.files['data'])
        df = pd.read_csv(app.config['UPLOADED_DATA_DEST'] + "/" + filename)
        # The upload is transient; remove it once loaded.
        os.remove(
            os.path.join(app.config['UPLOADED_DATA_DEST'] + "/" + filename))

        # Ensure lat/long are numeric and the date column is datetime.
        num_cols = ['latitude', 'longitude']
        df[num_cols] = df[num_cols].apply(pd.to_numeric)
        dt_cols = ['date']
        df[dt_cols] = df[dt_cols].apply(pd.to_datetime)

        # Integer day offset from the earliest date (timedelta -> int).
        df = df.assign(days_delta=df.date - df.date.min())
        df[['days_int']] = (df[['days_delta']] /
                            np.timedelta64(1, 'D')).astype(np.int64)

        # Heatmap centered on Europe at world zoom.
        heatmap = folium.Map(location=[40, 12], zoom_start=2.5)

        # BUG FIX: range()'s upper bound is exclusive, so the original dropped
        # the final day's points; use max + 1 to include the last frame.
        heat_data = [[
            [row['latitude'], row['longitude']]
            for index, row in df[df['days_int'] == i].iterrows()
        ] for i in range(df.days_int.min(), df.days_int.max() + 1)]

        hm = plugins.HeatMapWithTime(heat_data, auto_play=True, max_opacity=0.7)
        hm.add_to(heatmap)
        heatmap.save('static/map.html')
        return str(df.days_int.max())
    return render_template('upload.html')
def showMapWithTimeAndWeight(df, radius=15, htmlFilename='myMap.html'):
    """Show a HeatMap representing artist density on earth per year

    params:
        df: DataFrame with columns ['artist_latitude', 'artist_longitude',
            'weight']; weight must be contained in [0, 1]
        radius: Size of each point on the map
        htmlFilename: Location on the disc where to store the map (falsy to
            skip saving)
    """
    m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=2)

    # One frame of [lat, lon, weight] triples per year, in ascending order.
    ordered = df.sort_values("year")
    grouped = ordered.groupby("year")[[
        'artist_latitude', 'artist_longitude', "weight"
    ]]
    data = grouped.apply(lambda frame: frame.values.tolist()).tolist()
    index = ordered.year.astype(str).unique().tolist()
    # Frames and labels must stay aligned for the time slider.
    assert len(data) == len(index)

    plugins.HeatMapWithTime(data, index=index, radius=radius).add_to(m)
    addPlugins(m)
    if htmlFilename:
        m.save(htmlFilename)
    return m
def folium_heat_series(map_object, data, zoom_start=11, min_opacity=0.2, radius=4, blur=2, max_zoom=1):
    """Add a HeatMapWithTime layer built from `data` to `map_object`, save it,
    and return the map.

    map_object: folium.Map instance to draw on; str(map_object) also names
        the saved HTML file.
    data: List of lists of lists of latitude and longitude data e.g.
        timeseries_data_list = [[[row['Latitude'],row['Longitude']]
        for index, row in DataFramef[DataFrame['column'] == i].iterrows()]
        for i in range(1,13)]
    min_opacity, radius: forwarded to HeatMapWithTime.
    zoom_start, blur, max_zoom: kept for interface compatibility;
        HeatMapWithTime has no corresponding options, so they are unused.
    """
    # BUG FIX: the original referenced an undefined name `heat_data_list`
    # (NameError at call time) and ignored the min_opacity/radius parameters,
    # hard-coding 0.2 and 7 instead.
    fp.HeatMapWithTime(data,
                       min_opacity=min_opacity,
                       radius=radius,
                       auto_play=True,
                       max_opacity=0.8).add_to(map_object)
    name = str(map_object)
    map_object.save(name + '.html')
    return map_object
def test_heat_map_with_time():
    """Regression test: HeatMapWithTime renders with the expected JS/CSS
    imports and its client-side script template still fills in cleanly."""
    # Deterministic synthetic data: 100 points around (48, 5), drifting a
    # little each of the 100 frames.
    np.random.seed(3141592)
    initial_data = (np.random.normal(size=(100, 2)) * np.array([[1, 1]]) +
                    np.array([[48, 5]]))
    move_data = np.random.normal(size=(100, 2)) * 0.01
    data = [(initial_data + move_data * i).tolist() for i in range(100)]

    m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=6)
    hm = plugins.HeatMapWithTime(data)
    m.add_child(hm)
    m._repr_html_()
    out = m._parent.render()

    # We verify that the script imports are present.
    script = '<script src="https://rawgit.com/socib/Leaflet.TimeDimension/master/dist/leaflet.timedimension.min.js"></script>'  # noqa
    assert script in out
    script = '<script src="https://cdnjs.cloudflare.com/ajax/libs/heatmap.js/2.0.2/heatmap.min.js"></script>'  # noqa
    assert script in out
    script = '<script src="https://rawgit.com/pa7/heatmap.js/develop/plugins/leaflet-heatmap/leaflet-heatmap.js"></script>'  # noqa
    assert script in out
    script = '<link rel="stylesheet" href="http://apps.socib.es/Leaflet.TimeDimension/dist/leaflet.timedimension.control.min.css"/>'  # noqa
    assert script in out

    # We verify that the script part is correct: the template below mirrors
    # the plugin's JS initialisation and must render without raising.
    tmpl = Template("""
        var times = {{this.times}};

        {{this._parent.get_name()}}.timeDimension = L.timeDimension(
            {times : times, currentTime: new Date(1)}
        );

        var {{this._control_name}} = new L.Control.TimeDimensionCustom({{this.index}}, {
            autoPlay: {{this.auto_play}},
            backwardButton: {{this.backward_button}},
            displayDate: {{this.display_index}},
            forwardButton: {{this.forward_button}},
            limitMinimumRange: {{this.limit_minimum_range}},
            limitSliders: {{this.limit_sliders}},
            loopButton: {{this.loop_button}},
            maxSpeed: {{this.max_speed}},
            minSpeed: {{this.min_speed}},
            playButton: {{this.play_button}},
            playReverseButton: {{this.play_reverse_button}},
            position: "{{this.position}}",
            speedSlider: {{this.speed_slider}},
            speedStep: {{this.speed_step}},
            styleNS: "{{this.style_NS}}",
            timeSlider: {{this.time_slider}},
            timeSliderDrapUpdate: {{this.time_slider_drap_update}},
            timeSteps: {{this.index_steps}}
        })
        .addTo({{this._parent.get_name()}});

        var {{this.get_name()}} = new TDHeatmap({{this.data}},
            {heatmapOptions: {
                radius: {{this.radius}},
                minOpacity: {{this.min_opacity}},
                maxOpacity: {{this.max_opacity}},
                scaleRadius: {{this.scale_radius}},
                useLocalExtrema: {{this.use_local_extrema}},
                defaultWeight: 1,
            }
        })
        .addTo({{this._parent.get_name()}});
    """)
    assert tmpl.render(this=hm)
subset = ratings_data_vegas[ratings_data_vegas['stars'] == star] data.append(subset[['latitude', 'longitude']].values.tolist()) #initialize at vegas lat = 36.127430 lon = -115.138460 zoom_start = 11 print(" Vegas Review heatmap Animation ") # basic map m = folium.Map(location=[lat, lon], tiles="OpenStreetMap", zoom_start=zoom_start) #inprovising the Heatmapwith time plugin to show variations across star ratings hm = plugins.HeatMapWithTime(data, max_opacity=0.3, auto_play=True, display_index=True, radius=7) hm.add_to(m) m # In[ ]: end_time = time.time() print("Took", end_time - start_time, "s") # The buttons don't load properly. But you can click the play/loop button to see the various businesses based on their star ratings. # # # 5. Reviews Deep dive: # # Lets look at the top users based on the number of reviews they have given.
import folium
from folium import plugins
import numpy as np

# Deterministic demo: 100 base points scattered around (48, 5), each frame
# drifting the cloud slightly further along a fixed random direction.
np.random.seed(3141592)
base_points = (np.random.normal(size=(100, 2)) * np.array([[1, 1]]) +
               np.array([[48, 5]]))
drift = np.random.normal(size=(100, 2)) * 0.01
print(drift)

# 100 animation frames of [lat, lon] pairs.
data = [(base_points + drift * step).tolist() for step in range(100)]

m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=6)
plugins.HeatMapWithTime(data).add_to(m)
m.save('heat.html')
tiles="Stamen Toner", zoom_start=map_zoom) heat_df = crashes.loc[:, ['LATITUDE', 'LONGITUDE', 'CRASH_DATE']].dropna() # Create weight column, using date heat_df['Weight'] = pd.to_datetime(heat_df['CRASH_DATE']).dt.hour heat_df['Weight'] = heat_df['Weight'].astype(float).dropna() # List comprehension to make out list of lists heat_data = [[[row['LATITUDE'], row['LONGITUDE']] for _, row in heat_df[heat_df['Weight'] == i].iterrows()] for i in range(0, 24)] # Plot it on the map hm = plugins.HeatMapWithTime(heat_data, auto_play=False, radius=4, position="topright") hm.add_to(CHI_map_time) CHI_map_time.save("./web/folium/heat_crashes_over_time.html") # %% # Heat map over time - day CHI_map_time = folium.Map(map_location, tiles="Stamen Toner", zoom_start=map_zoom) heat_df = crashes.loc[:, ['LATITUDE', 'LONGITUDE', 'CRASH_DATE']].dropna() # Create weight column, using date heat_df['Weight'] = pd.to_datetime(heat_df['CRASH_DATE']).dt.dayofweek # heat_df['Weight'] = heat_df['Weight'].astype(float).dropna()
map_chicago = folium.Map(location=[41.830994, -87.647345], tiles="Stamen Terrain", zoom_start=10) crash2018 = crash[(crash['CRASH_DATE'] < pd.datetime(2019, 1, 1)) & (crash['CRASH_DATE'] >= pd.datetime(2018, 1, 1))].copy() heatmap = [] for i in range(1, 13): df = crash2018[crash2018['CRASH_MONTH'] == i] df1 = df.sample(int(len(df) * 0.3)) cood = [[row["LATITUDE"], row["LONGITUDE"]] for idx, row in df1.iterrows()] heatmap.append(cood) plugins.HeatMapWithTime(heatmap, radius=3, auto_play=True, max_opacity=0.8).add_to(map_chicago) map_chicago # *Dynamic heapmap showing geographic distribution of crash by hour in 2018* # In[313]: map_chicago = folium.Map(location=[41.830994, -87.647345], tiles="Stamen Terrain", zoom_start=10) crash2018 = crash[(crash['CRASH_DATE'] < pd.datetime(2019, 1, 1)) & (crash['CRASH_DATE'] >= pd.datetime(2018, 1, 1))].copy() heatmap = [] for i in range(0, 24):
data.append(subset[['latitude', 'longitude']].values.tolist()) lat = selected_restaurants['latitude'].mean() lon = selected_restaurants['longitude'].mean() zoom_start = 10 print("Animation for restaurants with different starts in %s in %s state" % (interested_city, interested_state)) # basic map m = folium.Map(location=[lat, lon], tiles="OpenStreetMap", zoom_start=zoom_start) #inprovising the Heatmapwith time plugin to show variations across star ratings hm = plugins.HeatMapWithTime(data, index=stars_list, max_opacity=0.3, auto_play=True, display_index=True, radius=10) hm.add_to(m) m # # Section 3. Explorary Data Analysis--Plots of restaurant attributes # In[13]: ##Distribution of price range in the interested city price_range = selected_restaurants.groupby('RestaurantsPriceRange2').count() range_set = list(price_range.index.values) number_set = list(price_range['business_id']) labels = ['Price Range' + str(s) for s in range_set]
heat_df = df_acc[df_acc['Speed_limit'] == '40'] # Reducing data size so it runs faster heat_df = heat_df[heat_df['Year'] == '2014'] # Reducing data size so it runs faster heat_df = heat_df[['Latitude', 'Longitude']] # Create weight column, using date heat_df['Weight'] = df_acc['Date'].str[3:5] heat_df['Weight'] = heat_df['Weight'].astype(float) heat_df = heat_df.dropna(axis=0, subset=['Latitude', 'Longitude', 'Weight']) # List comprehension to make out list of lists heat_data = [[[row['Latitude'], row['Longitude']] for index, row in heat_df[heat_df['Weight'] == i].iterrows()] for i in range(0, 13)] # Plot it on the map hm = plugins.HeatMapWithTime(heat_data, auto_play=True, max_opacity=0.8) hm.add_to(map_hooray) # Display the map map_hooray.save("C:\\Users\ACER\Desktop\\map_heatmapts.html") ############################################################ """ Plugins There are too many to demo them all but check out this notebook to see the additional plugins you can use. Likely to be of interest are MarkerCluster and Fullscreen. http://nbviewer.jupyter.org/github/python-visualization/folium/blob/master/examples/Plugins.ipynb """
return coordinates # In[10]: # get dataset data = getData() data.head() # In[11]: # get coordinates of all fire hotspots coordinates = getCoOrdinates(data) # In[12]: # plot thoose coordinates according to timeline aus_map = folium.Map([-23., 133.], zoom_start=4.5) fire_growth = plugins.HeatMapWithTime(coordinates, radius=10) fire_growth.add_to(aus_map) aus_map # In[13]: # save for future usage aus_map.save('aus_map.html') # In[ ]:
# Month-indexed heatmap of Espírito Santo (ES) accidents with injuries,
# 2017-2019.  NOTE(review): assumes `dates` and `heatmap` lists were
# initialised earlier in the file.
for year in range(2017, 2020):
    pf_accidents = pd.read_csv(CSV_DIR + '/datatran' + str(year) + '.csv',
                               dtype=object, sep=';', encoding='iso-8859-1')
    # One 'YYYY-MM' label per month of this year.
    date_array = [datetime(year, month, 1).strftime("%Y-%m")
                  for month in range(1, 13)]
    dates.extend(date_array)
    # CSV was read as strings; coerce the numeric columns.
    pf_accidents['latitude'] = pf_accidents['latitude'].astype(float)
    pf_accidents['longitude'] = pf_accidents['longitude'].astype(float)
    pf_accidents['feridos'] = pf_accidents['feridos'].astype(float)
    # Keep only ES accidents that injured someone, with usable coordinates.
    heatmap_data = pf_accidents[pf_accidents['uf'] == 'ES']
    heatmap_data = heatmap_data[heatmap_data['feridos'] > 0]
    heatmap_data = heatmap_data[['latitude', 'longitude', 'data_inversa']]
    heatmap_data = heatmap_data.dropna(axis=0,
                                       subset=['latitude', 'longitude'])
    heatmap_data = [[row['latitude'], row['longitude'], row['data_inversa']]
                    for index, row in heatmap_data.iterrows()]
    for date in date_array:
        # row[2][:-3] strips the day part of 'YYYY-MM-DD' for month matching.
        heatmap_array = [[row[0], row[1]] for row in heatmap_data
                         if row[2][:-3] == date]
        if heatmap_array:
            heatmap.append(heatmap_array)
        else:
            # Keep frame labels aligned with frames: drop empty months.
            dates.remove(date)

es_map = folium.Map(location=[-19.690729, -40.533432], zoom_start=7.5)
hm = plugins.HeatMapWithTime(heatmap, index=dates)
hm.add_to(es_map)
es_map.save('index.html')
covid_df['Log Confirmed'] = np.log10( covid_df['Confirmed']) # Log-scaling data to ease visualization covid_df['Log Confirmed'] = covid_df['Log Confirmed'].replace({ -np.inf: 0 }) # Replacing zero cases (log=-inf) by value small enough to not be visible maximum = covid_df.max()['Log Confirmed'] # Getting max log value covid_df['Std Confirmed'] = covid_df['Log Confirmed'].div( maximum) # Min Max Scaling to match weight required interval data = [] # Setting up data in format required by HeatMapWithTime for date in dates: # List of lists [lat, long, weight]. I added 1e-5 to all weights because the accepted interval is (0, 1] day_data = zip( covid_df[covid_df['Date'] == date]['Lat'].tolist(), covid_df[covid_df['Date'] == date]['Long'].tolist(), covid_df[covid_df['Date'] == date]['Std Confirmed'].add(1e-5).tolist()) day_data = [list(elem) for elem in day_data] data.append( day_data) # Appending on outer list corresponding to timestamps # Creting heat map heat_map = plugins.HeatMapWithTime(data, index=dates, min_speed=2.5, max_speed=6, speed_step=0.5) heat_map.add_to(map) # Saving map as HTML file map.save('covid_map.html')
def visualize_heatmap_by_hour(data, districts, start_date='2018-01-01',
                              end_date='2018-12-31'):
    """
    Visualize the geographical distribution of collisions in a series of
    hourly heatmaps.

    Args:
        data (pandas dataframe): Dataframe containing the rows per collision
            which is to be mapped.
        districts (string or list): 'ALL', or the districts whose collisions
            are to be presented in the heatmap.
        start_date (string): the starting date of the collisions to be
            presented in the heatmap.
        end_date (string): the end date of the collisions to be presented
            in the heatmap.

    Returns:
        the heatmap produced, or None if no collision matches the filters.

    Raises:
        ValueError: if timeframe user selected is invalid
    """
    columns = ['Y', 'X', 'date', 'object_id', 's_hood', 'l_hood', 'hour']
    df_collision = data.reindex(columns=columns).dropna(axis=0, how='any')

    if np.datetime64(start_date) > np.datetime64(end_date):
        raise ValueError("Invalid timeframe input. Please enter again.")

    # Date-range mask, optionally narrowed to the requested districts.
    base_mask = ((df_collision['date'] >= np.datetime64(start_date)) &
                 (df_collision['date'] <= np.datetime64(end_date)))
    if districts != 'ALL':
        base_mask = base_mask & (df_collision['l_hood'].isin(districts))

    if base_mask.sum() == 0:
        print('No matched collision.')
        return None

    # FIX: the original recomputed the per-hour mask from the *unfiltered*
    # frame, dropping the date/district filter it had just validated, so
    # collisions outside [start_date, end_date] leaked into every frame.
    # Filter once and slice the matched rows per hour instead.
    matched = df_collision[base_mask]
    hours = sorted(matched.hour.value_counts().index.values)

    heat_data = []
    for hour in hours:
        # [lat, lon] pairs for this hour's frame.
        coordinates = matched.loc[matched['hour'] == hour, ['Y', 'X']].values
        heat_data.append(coordinates.tolist())

    # Human-readable frame labels; 12 itself is labelled '12 PM'.
    time_index = [(str(hour) + ' AM') if hour < 12 else str(hour - 12) \
                  + ' PM' if hour > 12 else str(hour) + ' PM' \
                  for hour in hours]

    m = folium.Map(location=MAP_LOCATION_START, zoom_start=MAP_ZOOM)
    hm = plugins.HeatMapWithTime(heat_data,
                                 index=time_index,
                                 auto_play=True,
                                 max_opacity=0.3)
    hm.add_to(m)
    return m
# NOTE(review): the opening of this statement (an `initial_data = (...`
# assignment, presumably) lies in an earlier chunk; only its tail is
# visible here, so the dangling `)` below closes a paren opened above.
np.array([[48, 5]])
)

# 100 random per-point offsets; each frame drifts the cloud a bit further.
move_data = np.random.normal(size=(100, 2)) * 0.01

# One list of [lat, lon] pairs per animation frame (100 frames).
data = [(initial_data + move_data * i).tolist() for i in range(100)]

# Append a uniform intensity so each point becomes [lat, lon, weight].
weight = 1  # default value
for time_entry in data:
    for row in time_entry:
        row.append(weight)

m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=6)

hm = plugins.HeatMapWithTime(data)
hm.add_to(m)

from datetime import datetime, timedelta

# One date label per frame, starting today and stepping one day per frame.
time_index = [
    (datetime.now() + k * timedelta(1)).strftime('%Y-%m-%d')
    for k in range(len(data))
]

# Fresh map for the labelled variant (the first map above used no index).
m = folium.Map([48., 5.], tiles='stamentoner', zoom_start=6)
def index():
    """Flask route: build the French departmental COVID-deaths heat map
    and render the index page.

    Downloads the official hospital dataset, normalizes cumulative deaths
    per department, writes an animated folium map to the templates folder,
    then serves the page that embeds it.
    """
    # Official French hospital COVID dataset (semicolon-separated).
    data = pd.read_csv(
        'https://www.data.gouv.fr/fr/datasets/r/63352e38-d353-4b54-bfd1-f1b3ee1cabd7',
        sep=';')
    data['dep'] = data['dep'].astype('str')
    data['jour'] = data['jour'].astype('datetime64')

    # Cumulative deaths ('dc') per department per day: take the daily max
    # across reporting rows, then order by department and date.
    data_deces = data[['dep', 'jour', 'dc']]\
        .groupby(['jour', 'dep'])\
        .max()\
        .reset_index()\
        .sort_values(['dep', 'jour'])
    data_deces = data_deces.dropna()
    # 'dep' was cast to str above, so missing codes show up as the literal
    # string 'nan' — drop those rows explicitly.
    data_deces = data_deces[data_deces['dep'] != 'nan']
    data_deces = data_deces[['dep', 'jour', 'dc']].reset_index()

    deces_cumules_par_dept_as_pandas = data_deces[['dep', 'jour', 'dc']]

    # One frame label per distinct day, as ISO date strings.
    index_time = list(
        pd.to_datetime(list(
            deces_cumules_par_dept_as_pandas['jour'].unique())))
    index_time = [str(x.date()) for x in index_time]

    # Scale death counts into heat-map weights (cap chosen empirically);
    # values <= 0 are clamped to 0.01 because weights must be positive.
    max_dc = 200
    deces_cumules_par_dept_as_pandas[
        'dc'] = deces_cumules_par_dept_as_pandas['dc'] / max_dc
    deces_cumules_par_dept_as_pandas['dc'] = deces_cumules_par_dept_as_pandas['dc'].\
        apply(lambda x: 0.01 if x<=0 else x)

    # Department boundary polygons (GeoJSON).
    # NOTE(review): hard-coded absolute Windows path — not portable.
    with open(
            'C:\\Users\\Utilisateur\\OneDrive\\Bureau\\Berouachedi_project\\departements.geojson.txt'
    ) as f:
        geojson_depts = json.load(f)

    def get_centroid(coords):
        """Return the approximate centre of a department.

        A department is treated as a bag of geographic points; this
        returns the median latitude and longitude of all its boundary
        points (a cheap, outlier-robust centroid).
        """
        # Flatten the nested coordinate lists via their string form:
        # after stripping brackets, values alternate lng, lat, lng, lat...
        l = str(coords).strip()
        l = l.replace('[', '').replace(']', '').replace(' ', '').split(',')
        lat_list = [float(e) for e in l[1::2]]
        lng_list = [float(e) for e in l[0::2]]
        return np.median(lat_list), np.median(lng_list)

    # Centroid table: one [code, lat, lng] row per department.
    dept_list = []
    for dept in geojson_depts['features']:
        lat, lng = get_centroid(dept['geometry']['coordinates'])
        code_dept = dept['properties']['code']
        dept_list.append([code_dept, lat, lng])

    dept_df = pd.DataFrame(dept_list, columns=['dept', 'lat', 'lng'])
    dept_df = dept_df.sort_values('dept').reset_index()
    dept_df = dept_df[['dept', 'lat', 'lng']]

    # Precomputed [lat, lng, weight] frames for the heat map.
    # NOTE(review): hard-coded absolute Windows path — not portable.
    with open(
            'C:\\Users\\Utilisateur\\OneDrive\\Bureau\\Berouachedi_project\\lat_lng_value.json'
    ) as f:
        lat_lng_value = json.load(f)

    def style_function(feature):
        # Transparent fill: the GeoJson layer only draws the borders.
        return {
            'fillOpacity': 0,
            'weight': 0.9,
        }

    # Map centred on metropolitan France.
    new_map = folium.Map([46.890232, 2.599816],
                         tiles='stamentoner',
                         zoom_start=6)
    hm = plugins.HeatMapWithTime(lat_lng_value['lat_lng_value'],
                                 index=index_time,
                                 auto_play=True,
                                 max_opacity=0.8,
                                 radius=0.2,
                                 scale_radius=True,
                                 gradient={
                                     .2: 'blue',
                                     .4: 'lime',
                                     0.5: 'red'
                                 },
                                 min_speed=1)
    hm.add_to(new_map)

    # Department borders with a styled hover tooltip showing name + code.
    folium.GeoJson(
        geojson_depts,
        style_function=style_function,
        tooltip=folium.GeoJsonTooltip(
            fields=['nom', 'code'],
            aliases=[
                '<div style="background-color: lightyellow; color: black; padding: 3px; border: 5px solid black; border-radius: 6px;">'
                + item + '</div>' for item in ['nom', 'code']
            ],
            style="font-family: san serif;",
            localize=True,
            labels=False,
            sticky=False),
        highlight_function=lambda x: {
            'weight': 3,
            'fillColor': 'grey'
        }).add_to(new_map)
    folium.LayerControl().add_to(new_map)

    # Saved under templates/ so the rendered page can embed it.
    new_map.save(
        'templates/france_departements_visualisation_deaths_of_covid.html')
    return render_template('index_v3.html')
# Animated heat map of trip stops, bucketed into 15-minute start-time bins.
alldata['timestart'] = pd.to_datetime(alldata['timestart'])
# Fractional-hour representation of stop/start times (e.g. 13:30 -> 13.5).
alldata['timestopnum'] = alldata[
    'timestop'].dt.hour + alldata['timestop'].dt.minute / 60
alldata['timestartnum'] = alldata[
    'timestart'].dt.hour + alldata['timestart'].dt.minute / 60

# Bin edges: 0.0, 0.25, ..., 23.75 (96 edges -> 95 bins).
start = [0]
while start[-1] < 23.75:
    start.append(start[-1] + 15 / 60)

final = []
for j in range(1, len(start)):
    # FIX: use >= on the lower edge — the original used strict > on both
    # edges, so trips starting exactly on a quarter-hour boundary
    # (e.g. 08:15 -> 8.25) were silently dropped from every bin.
    datase = alldata[(alldata.timestartnum < start[j])
                     & (alldata.timestartnum >= start[j - 1])]
    timedata = []
    # FIX: distinct loop variable — the original reused `j` here,
    # shadowing the bin index inside the outer loop body.
    for _, row in datase.iterrows():
        timedata.append([row['latstop'], row['lonstop'], wat])
    final.append(timedata)

from datetime import datetime, timedelta

# One HH:MM:SS label per 15-minute slot; the date itself is irrelevant.
time_index = [
    (datetime(1996, 9, 19) + k * timedelta(minutes=15)).strftime('%H:%M:%S')
    for k in range(24 * 4)
]

m = folium.Map([13.737797, 100.559699], tiles=typesmap, zoom_start=12)
# Skip the first label so the index lines up with the 95 bins built above.
hm = plugins.HeatMapWithTime(final,
                             index=time_index[1:],
                             auto_play=True,
                             max_opacity=opacity)
hm.add_to(m)
m.save(out + ".html")