Example #1
def IndiaMap(df, colorBy, columns):
    state_geo = os.path.abspath(os.path.join('Data', 'indiageojson.json'))
    m = folium.Map(location=[plotting["India"]["Center"]["Lat"], plotting["India"]["Center"]["Long"]],
                   zoom_start=plotting["DefaultZoom"],
                   tiles='cartodbpositron')

    threshold_scale = split_six(df[colorBy])
    m.choropleth(
            geo_data=state_geo,
            data=df,
            columns=columns,
            name='choropleth',
            key_on='feature.id',
            fill_color='PuBuGn',
            fill_opacity=0.7,
            line_opacity=0.2,
            threshold_scale=threshold_scale,
            legend_name=colorBy
    )


    markers = CreatePopupCirlcesForLocations(df, columns[0], colorBy, 'deviationFromMean')
    for marker in markers:
        marker.add_to(m)

    #GetText([12,86] , ).add_to(m)
    folium.LayerControl().add_to(m)

    return IndiaMapModel(colorBy, 'Sized By Deviation From Mean : {0}<br/> Green < Mean <br/> Red > Mean'.format(df[colorBy].mean()), m)
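A minimal usage sketch (not part of the original code) of how IndiaMap might be called. The DataFrame contents and the attributes of the returned IndiaMapModel are assumptions here; the only hard requirement implied by the function is that columns[0] holds the values matched against feature.id in the GeoJSON.

# Hypothetical call: one row per state, with the metric used both for colouring
# and for the popup circles created by CreatePopupCirlcesForLocations.
df = pd.DataFrame({'State': ['Kerala', 'Bihar'],
                   'Literacy': [94.0, 61.8],
                   'deviationFromMean': [16.1, -16.1]})
india_model = IndiaMap(df, colorBy='Literacy', columns=['State', 'Literacy'])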
Example #2
def generate_hazard_map_html(model, X, mapdata, html_map_name):

    '''
    Generate new hazard map as a html file
    INPUTS
    model: an estimator object
    X: the data for which we want to predict fires
    mapdata: block data in the form of SF_blocks_years
    html_map_name: name of the html file to be produced
    '''


    GISCELLS = list(X['GISYEARJOIN'])
    mapgeom = mapdata[mapdata['GISYEARJOI'].isin(GISCELLS)]
    fires_holdout_predict = model.predict_proba(X)
    # use the filtered block geometries so they line up with the predictions for X
    riskmap = gpd.GeoDataFrame({'geometry': mapgeom['geometry'], 'fire_prob': fires_holdout_predict[:, 1]})

    riskmap['BLOCKID'] = np.arange(len(riskmap))
    gdf_wgs84 = riskmap.copy()
    gdf_wgs84.crs = {'init': 'epsg:4326', 'no_defs': True}

    thresh_scale = split_six(riskmap['fire_prob'])

    m = folium.Map(location=[37.76, -122.42],zoom_start=13,
               tiles="CartoDB positron",
              width='100%',
              height='100%')

    m.choropleth(geo_data=gdf_wgs84.to_json(), data=riskmap, columns=['BLOCKID', 'fire_prob'],
                 key_on='feature.properties.BLOCKID',
                 max_zoom=16,
                 fill_opacity=0.8,
                 fill_color='OrRd',
                 line_opacity=0.1,
                 highlight=True,
                 legend_name='Probability of fire',
                 threshold_scale=thresh_scale)

    m.add_child(folium.LatLngPopup())

    m.save(html_map_name)
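A hedged usage sketch (not part of the original module): the variable names below are assumptions, but they follow the docstring, i.e. a fitted probabilistic classifier, a feature frame carrying a 'GISYEARJOIN' column, and the SF_blocks_years GeoDataFrame with the matching 'GISYEARJOI' column.

# clf is assumed to be an already-fitted estimator exposing predict_proba,
# e.g. clf = RandomForestClassifier().fit(X_train, y_train)
generate_hazard_map_html(model=clf,
                         X=X_holdout,                  # blocks to score
                         mapdata=SF_blocks_years,      # block geometries
                         html_map_name='fire_hazard_map.html')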
Example #3
).add_to(m)
folium.TopoJson(
    open(antarctic_ice_shelf_topo),
    'objects.antarctic_ice_shelf',
    name='topojson'
).add_to(m)
folium.LayerControl().add_to(m)
m.save('/tmp/folium_xx_ice_map.html')
###############################################################################
import folium
import pandas as pd
state_geo = '/gdata/folium/data/us-states.json'
state_unemployment = '/gdata/folium/data/US_Unemployment_Oct2012.csv'
state_data = pd.read_csv(state_unemployment)
m = folium.Map(location=[48, -102], zoom_start=3)
m.choropleth(geo_data=state_geo, name='choropleth', data=state_data,
    columns=['State', 'Unemployment'], key_on='feature.id',
    fill_color='YlGn', fill_opacity=0.7, line_opacity=0.2,
    legend_name='Unemployment Rate (%)')
folium.LayerControl().add_to(m)
m.save('/tmp/folium_xx_us_states.html')
###############################################################################
from branca.utilities import split_six
threshold_scale = split_six(state_data['Unemployment'])
m = folium.Map(location=[48, -102], zoom_start=3)
m.choropleth(geo_data=state_geo, data=state_data,columns=['State', 'Unemployment'],
    key_on='feature.id', fill_color='BuPu', fill_opacity=0.7,
    line_opacity=0.5, legend_name='Unemployment Rate (%)',
    threshold_scale=threshold_scale, reset=True)
m.save('/tmp/folium_xx_us_states_d3.html')
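Several of these examples lean on branca's split_six helper, so here is a small, self-contained illustration (not from any of the original snippets): it takes a pandas Series and returns a short list of quantile-derived break values, which folium's choropleth accepts as threshold_scale.

import pandas as pd
from branca.utilities import split_six

toy = pd.Series([2.1, 3.4, 4.8, 5.6, 7.9, 9.2, 10.3])
breaks = split_six(toy)   # list of numeric breakpoints computed from the series
print(breaks)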
Example #4
county_shp = county_shp.loc[county_shp['STATEFP']=='48']
county_shp = county_shp.rename(columns={'NAME':'CountyName'})
county_shp['CountyName'] = county_shp['CountyName'].apply(lambda x: x.upper())

census_data = gpd.GeoDataFrame(census_data)
census_data['CountyName'] = census_data['CountyName'].apply(lambda x: x.upper())
gdf = gdf.merge(census_data, on='CountyName')
gdf = gdf.merge(county_shp, on='CountyName')
gdf['TrafficperCapita'] = gdf['2015Traffic'] / gdf['Total Population']
gdf = gpd.GeoDataFrame(gdf)

gdf = gdf.sort_values(by='TrafficperCapita', ascending=False)
gdf.geometry = gdf['geometry']
m = folium.Map([31.2338,-98.6768], tiles='Stamen Toner', zoom_start=6)

thresholdscale = split_six(gdf['TrafficperCapita'])

m.choropleth(geo_data=gdf.to_json(), data=gdf, columns=['CountyName', 'TrafficperCapita'],
             name='Road Traffic Counts in Texas Counties, per Capita 2015',
             legend_name='Average Daily Traffic Count per Capita',
             key_on='feature.properties.CountyName', fill_color='OrRd', fill_opacity=0.9,
             threshold_scale=thresholdscale,
             highlight=True,
             )

datadict = {'2015 Traffic County Details':gdf}

def addlayer(dictobject):
    for geodata in dictobject:
        fg = folium.FeatureGroup(name=geodata)
        local = dictobject[geodata]
        for geo, traffic15, totPop, trafCapita, countyname, name in zip(local.geometry, local['2015Traffic'], local['Total Population'], local['TrafficperCapita'], local['NAMELSAD'], local['CountyName']):
            folium.Marker([geo.centroid.y+0.03, geo.centroid.x-0.1], icon=DivIcon(icon_size=(7,12), icon_anchor=(0,0), popup_anchor=(0, 0),
                                           html=f'<div style="font-size:5pt; font-family:helvetica neue; text-align:center"><b>{name}</b></div>'),
Example #5
from branca.utilities import split_six

# Pathing to and reading in the csv with city names, longitudes, latitudes, population etc to a pandas df
city_data_path = os.path.join('data', 'us-cities.csv')
city_df = pd.read_csv(city_data_path)

population_column = city_df['population']

# filepath for geojson which contains coordinate info for mapping state boundaries on map
states_geo = os.path.join('data', 'us-states.json')

# Pathing to and reading in .csv with states and populations as name, pop
state_data_path = os.path.join('data', 'us-state-populations.csv')
state_df = pd.read_csv(state_data_path)

threshold_scale = split_six(state_df['pop'])

# Mean of lat/lon used to center the map between the data points
mean_latitude = city_df['lat'].mean()
mean_longitude = city_df['lon'].mean()

# Finding min/max of populations for generating quartiles
minimum_population = population_column.min()
maximum_population = population_column.max()

# Creating the base folium map centered on the mean lat/lon
base_map = folium.Map(location=[mean_latitude, mean_longitude], tiles='Mapbox Control Room',
                      zoom_start=5)

# Identifying quartiles for setting a green to red color gradient based on population
lower_quartile = int(population_column.quantile(.25))
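The original snippet is cut off at this point. Below is a sketch of how the quartile-based green-to-red colouring described in the comments might continue; it is an assumption, not the original author's code, and reuses only the names defined above (city_df, population_column, base_map, lower_quartile).

upper_quartile = int(population_column.quantile(.75))

def population_color(pop):
    # Hypothetical helper: green for the smallest cities, red for the largest
    if pop <= lower_quartile:
        return 'green'
    elif pop >= upper_quartile:
        return 'red'
    return 'orange'

# One circle marker per city, coloured by its population bucket
for _, row in city_df.iterrows():
    folium.CircleMarker(location=[row['lat'], row['lon']],
                        radius=3,
                        color=population_color(row['population'])).add_to(base_map)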
Example #6
# Merging the county data with census data (Population)
counties = counties.merge(census_data, on='NAME')
counties = counties.merge(dps_regions_list, on='NAME')
counties = gpd.GeoDataFrame(counties).sort_values(by='Total Population', ascending=False)

# Dissolving the county polygons into larger Metro regions.
CBSAFP = counties.dissolve(by='CBSAFP', as_index=False, aggfunc='sum')

# Dissolving the county polygons into DPS regions.
regions = counties.dissolve(by='DPSRegion',as_index=False, aggfunc='sum')

# Instantiating the folium Map object
regionsmap = folium.Map([31.2338,-98.6768], tiles='OpenStreetMap', zoom_start=6)

# Assigning a threshold scale based on Population
thresholdscale = split_six(regions['Total Population'])

# Adding a choropleth map of the DPS regions as the first layer.
regionsmap.choropleth(
    geo_data=regions.to_json(), data=regions, name='Texas DPS Region Polygons',
    columns=['DPSRegion', 'Total Population'],
    legend_name='Texas DPS Regions by Population', key_on='feature.properties.DPSRegion',
    threshold_scale=thresholdscale,
    fill_color='OrRd', fill_opacity=0.5, highlight=True, line_weight=3.0,
)

# An MSA dictionary for mapping the CBSAFP codes to metro-area names
msa_dict = {
    "12420":"Austin-Round Rock-San Marcos, TX Metro Area",
    "41700":"San Antonio-New Braunfels, TX Metro Area",
    "26420":"Houston-Sugar Land-Baytown, TX Metro Area",
    "19100":"Dallas-Fort Worth-Arlington, TX Metro Area"
}
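A short sketch (assumed, not shown in the original excerpt) of how msa_dict could be used next: mapping each dissolved metro polygon's CBSAFP code to a readable metro-area name, e.g. for popups or tooltips.

# Hypothetical follow-up: attach a human-readable MSA name to each dissolved metro polygon
CBSAFP['MSAName'] = CBSAFP['CBSAFP'].map(msa_dict)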