Beispiel #1
0
def da_polygon_sjoin(polygon_gdf,
                     annotate_column=False, ann_col_name=None, ann_label=None, start_date=None, end_date=None):
    """
    v1.1
    Performs a SPATIAL JOIN (how='inner', op='within') between the global
    GeoDataFrame 'teranet_da_gdf' (Dissemination Areas joined with a Teranet
    aggregate) and the provided GeoDataFrame with polygon(s) of interest,
    then plots the selected DAs over a basemap annotated with their DA IDs.

    The CRS of 'polygon_gdf' must be specified; it is converted to the CRS
    of 'teranet_da_gdf' during the SPATIAL JOIN.  The join is performed on
    the DA polygons (with the Teranet aggregate column), not on the actual
    points of Teranet records.

    ----------------
    Input arguments: polygon_gdf     -- GeoDataFrame -- polygon(s) of interest used to
                                                        subset the GeoDataFrame of DAs via
                                                        a SPATIAL JOIN (CRS must be set)

                     annotate_column -- bool   -- if True, annotate each DA on the map with
                                                  the value from column 'ann_col_name' of
                                                  'teranet_da_gdf' (e.g., record counts);
                                                  'start_date' and 'end_date' should then
                                                  be provided for the map title
                                                  (default=False)

                     ann_col_name    -- string -- column name from 'teranet_da_gdf' that
                                                  provides the annotation values when
                                                  'annotate_column' is True
                                                  (default=None)

                     ann_label       -- string -- label appended after the values from
                                                  'ann_col_name' (e.g., 'records', '$ mean')
                                                  (default=None)

                     start_date      -- string -- start date of the Teranet subset, used
                                                  for the map title when 'annotate_column'
                                                  is True (default=None)

                     end_date        -- string -- end date of the Teranet subset, used
                                                  for the map title when 'annotate_column'
                                                  is True (default=None)
    --------
    Returns:     teranet_da_polygon_gdf -- GeoDataFrame -- subset of the GeoDataFrame with
                                                           DAs from the SPATIAL JOIN with
                                                           the polygon(s) of interest

    -----------------
    Global variables:  teranet_da_gdf -- GeoDataFrame -- GeoDataFrame with dissemination
                                                         areas joined with Teranet aggregate
                                                         generated in step 1.2, 2.2, 3.2
    """
    # global variables (see Docstring for description)
    global teranet_da_gdf

    # perform the SPATIAL JOIN between the Teranet DA GeoDataFrame and the polygon of interest
    # NOTE(review): geopandas >= 0.10 renames 'op' to 'predicate' -- update when upgrading
    teranet_da_polygon_gdf = gpd.sjoin(teranet_da_gdf, polygon_gdf.to_crs(teranet_da_gdf.crs),
                                       how='inner',
                                       op='within')

    # create figure and axis
    f, ax = plt.subplots(1, figsize=(12, 12))

    # plot the subset of DAs (reprojected to web mercator for the basemap)
    teranet_da_polygon_gdf.to_crs(epsg=3857).plot(ax=ax, color='red', edgecolor='black', alpha=0.3)

    # plot the polygon of interest
    polygon_gdf.to_crs(epsg=3857).plot(ax=ax, alpha=0.3)

    # add basemap (older contextily API; newer versions use 'source=' instead of 'url=')
    ctx.add_basemap(ax, url=ctx.sources.ST_TONER_BACKGROUND, alpha=0.1)

    ax.set_axis_off()

    # zoom the map to the polygon of interest
    # BUG FIX: previously read the unrelated global 'downtown_polygon_gdf',
    # which broke the zoom (or raised NameError) for any other polygon
    minx, miny, maxx, maxy = polygon_gdf.to_crs(epsg=3857).total_bounds
    ax.set_xlim(minx, maxx)
    ax.set_ylim(miny, maxy)

    # annotate each DA at its centroid with its ID and, optionally, the value
    # from 'ann_col_name' followed by 'ann_label'
    # (.items() replaces Series.iteritems(), removed in pandas 2.0)
    for index, centroid in teranet_da_polygon_gdf.to_crs(epsg=3857).centroid.items():
        x, y = centroid.coords[0]
        label = "DA #" + str(index)
        if annotate_column:
            label += ": \n" + str(teranet_da_gdf.loc[index, ann_col_name]) + ann_label
        ax.text(x,
                y,
                label,
                verticalalignment='center',
                horizontalalignment='center')

    # set plot title (include the date range when column annotations are shown)
    if annotate_column:
        ax.set_title("{0} DAs selected within the polygon of interest\nfrom {1} to {2}"
                     .format(len(teranet_da_polygon_gdf),
                             start_date,
                             end_date),
                     fontdict={'fontsize': '16', 'fontweight': '3'})
    else:
        ax.set_title("{0} DAs selected within the polygon of interest".format(len(teranet_da_polygon_gdf)),
                     fontdict={'fontsize': '16', 'fontweight': '3'})

    plt.show()

    print("A subset with {0} DAs was created via a SPATIAL JOIN of GeoDataFrame 'teranet_da_gdf' \
and GeoDataFrame 'polygon_gdf'.\n---\n".format(len(teranet_da_polygon_gdf)))

    return teranet_da_polygon_gdf
Beispiel #2
0
def plot_oa_importance(
    lad20cd,
    oa_weights,
    theta=500,
    title="",
    save_path=None,
    ax=None,
    figsize=(10, 10),
    alpha=0.75,
    cmap="plasma",
    legend=True,
    vmin=None,
    vmax=None,
    show=True,
):
    """Plot the "importance" of each output area (OA), given per-OA weights
    and a coverage decay distance (theta).  Importance of an OA is the total
    (weight-normalised) coverage of the city achieved by placing a single
    sensor at that OA's centroid; the greedy optimiser places its first
    sensor at the OA with the highest importance.

    Arguments:
        lad20cd {str} -- local authority code used to look up OA data
        oa_weights {pd.Series} -- weights for each OA (indexed by oa11cd)

    Keyword Arguments:
        theta {int} -- coverage decay rate (default: {500})
        title {str} -- plot title (default: {""})
        save_path {str} -- path to save the figure, or None to not save
        (default: {None})
        ax {[type]} -- matplotlib axis to plot to (created if None)
        figsize {tuple} -- figure size (default: {(10,10)})
        alpha {float} -- transparency of fill areas (default: {0.75})
        cmap {str} -- matplotlib colormap for fill areas (default: {"plasma"})
        legend {bool} -- if True show the colour scale (default: {True})
        vmin {[type]} -- colour scale minimum, or None to autoscale (default: {None})
        vmax {[type]} -- colour scale maximum, or None to autoscale (default: {None})
        show {bool} -- if True (and not saving) display the figure (default: {True})
    """
    centroids = get_oa_centroids(lad20cd)
    centroids["weight"] = oa_weights

    xs = centroids["x"].values
    ys = centroids["y"].values
    weights = centroids["weight"].values
    oa_index = centroids.index.values

    cov = coverage_matrix(xs, ys, theta=theta)
    total_weight = weights.sum()

    # importance of a candidate site = weighted coverage it provides to every
    # OA, normalised by the total weight
    importance_values = np.array(
        [(weights * cov[site, :]).sum() / total_weight for site in range(len(xs))]
    )
    oa_importance = pd.Series(data=importance_values, index=oa_index)

    oa_shapes = get_oa_shapes(lad20cd)
    oa_shapes["importance"] = oa_importance

    if ax is None:
        ax = plt.figure(figsize=figsize).gca()

    cax = None
    if legend:
        cax = get_color_axis(ax)
        cax.set_title("Density")

    ax = oa_shapes.plot(
        column="importance",
        figsize=figsize,
        alpha=alpha,
        cmap=cmap,
        legend=legend,
        ax=ax,
        cax=cax,
        vmin=vmin,
        vmax=vmax,
    )

    ctx.add_basemap(
        ax,
        source="http://a.tile.stamen.com/toner/{z}/{x}/{y}.png",
        crs=oa_shapes.crs.to_epsg(),
    )
    ax.set_title(title)
    ax.set_axis_off()

    if save_path:
        plt.savefig(save_path, dpi=200)
        plt.close()
    elif show:
        plt.show()
Beispiel #3
0
def plot_footprints(cam_dir,
                    img_dir,
                    reference_dem,
                    output_directory=None,
                    show=False,
                    verbose=False,
                    basemap='ctx',
                    img_file_extension='.tif',
                    cam_file_extension='.tsai'):
    """
    Plot image footprints from images and camera files.

    Parameters
    ----------
    cam_dir : str
        Directory containing camera files.
    img_dir : str
        Directory containing image files.
    reference_dem : str
        Reference DEM passed to bare.plot.prepare_footprint.
    output_directory : str, optional
        Output directory (resolved via bare.io.create_dir).
    show : bool
        If True display the plot; otherwise save 'footprints.png'.
    verbose : bool
        If True print progress messages.
    basemap : str
        'ctx' reprojects footprints to EPSG:3857 for the contextily basemap.
    img_file_extension, cam_file_extension : str
        Extensions used to glob image / camera files.
    """

    out_dir_abs = bare.io.create_dir(output_directory)

    cam_list = sorted(
        glob.glob(os.path.join(cam_dir, '*' + cam_file_extension)))
    img_list = sorted(
        glob.glob(os.path.join(img_dir, '*' + img_file_extension)))

    # collect footprints in a list (pandas removed DataFrame.append in 2.0)
    footprint_list = []
    crs = None

    for cam_file in cam_list:

        # BUG FIX: the original unpacked into 'cam_file_extension', clobbering
        # the function parameter of the same name
        _, cam_base_name, _ = bare.io.split_file(cam_file)

        # find the image that matches this camera file
        img_file_name = None
        matched_base_name = None
        for img_file in img_list:
            _, img_base_name, _ = bare.io.split_file(img_file)
            if img_base_name in cam_base_name:
                img_file_name = img_file
                matched_base_name = img_base_name

        if img_file_name is None:
            # BUG FIX: the original raised NameError (undefined name / stale
            # 'img_file_name' from a previous iteration) when no image
            # matched; skip this camera file instead
            if verbose:
                print('\nNo matching image found for ' + cam_file + '.')
            continue

        if verbose:
            # BUG FIX: the original referenced the undefined 'img_base_name'
            print('\nGenerating footprint for ' + matched_base_name +
                  img_file_extension + '.')

        footprint = bare.plot.prepare_footprint(
            img_file_name,
            cam_file,
            reference_dem,
            output_directory=output_directory)

        if footprint is not None:
            crs = footprint.crs
            footprint_list.append(footprint)

    if footprint_list:
        # plot returned footprints
        df = pd.concat(footprint_list)
        footprints = gpd.GeoDataFrame(df,
                                      columns=['file_name', 'geometry'],
                                      crs=crs)
        if basemap == 'ctx':
            footprints = footprints.to_crs(epsg=3857)

        footprints = bare.geospatial.extract_polygon_centers(footprints)

        fig, ax = plt.subplots(1, figsize=(10, 10))

        footprints.plot(ax=ax, color='b', edgecolor='b', alpha=0.1)

        for idx, row in footprints.iterrows():
            # BUG FIX: matplotlib >= 3.4 removed annotate's 's=' keyword;
            # pass the label text positionally
            plt.annotate(row['file_name'],
                         xy=row['polygon_center'],
                         horizontalalignment='center')

        ctx.add_basemap(ax)
        ax.set_title('camera footprints')

        # visualize, or write to file in out_dir_abs
        if not show:
            out = os.path.join(out_dir_abs, 'footprints.png')
            fig.savefig(out, bbox_inches="tight")
            plt.close()
        else:
            plt.show()
Beispiel #4
0
def plot_residuals(ba_dir,
                   output_directory=None,
                   ascending=True,
                   basemap='ctx',
                   glacier_shape_fn=None,
                   share_axes=True):
    # TODO
    # - Create interactive plot (html with bokeh maybe) to pan and zoom around
    # when residuals end up in weird places.
    # - Allow for plotting over orthoimage mosaic.
    # - Add condition to only sharex sharey if extent similar enough,
    # else filter and report gross outliers.
    '''
    Visualize match-point residuals before and after camera alignment during
    bundle adjustment.

    Parameters
    ----------
    ba_dir : str
        Bundle-adjustment directory containing the *initial* and *final*
        pointmap csv files.
    output_directory : str, optional
        Output directory; if the resolved directory is None the figure is
        shown instead of saved.
    ascending : bool
        Sort order passed to bare.core.ba_pointmap_to_gdf.
    basemap : str
        'ctx' adds a contextily basemap to both panels.
    glacier_shape_fn : str, optional
        Optional glacier outline shapefile to overlay on both panels.
    share_axes : bool
        If True the two panels share x/y limits.
    '''

    print('Plotting residuals before and after bundle adjustment...')

    # create output directory
    out_dir_abs = bare.io.create_dir(output_directory)

    initial_point_map_csv_fn = glob.glob(
        os.path.join(ba_dir, '*initial_*_pointmap*csv'))[0]
    final_point_map_csv_fn = glob.glob(
        os.path.join(ba_dir, '*final_*_pointmap*csv'))[0]

    # skip the second header row (the original passed the duplicate [1, 1])
    initial_df = pd.read_csv(initial_point_map_csv_fn, skiprows=[1])
    final_df = pd.read_csv(final_point_map_csv_fn, skiprows=[1])

    # convert to GeoDataFrame
    initial_gdf = bare.core.ba_pointmap_to_gdf(initial_df, ascending=ascending)
    final_gdf = bare.core.ba_pointmap_to_gdf(final_df, ascending=ascending)

    if share_axes:
        fig, ax = plt.subplots(1, 2, figsize=(10, 5), sharex=True, sharey=True)
    else:
        fig, ax = plt.subplots(1, 2, figsize=(10, 5))

    # common colour limits taken from the pre-adjustment residuals
    clim = np.percentile(initial_gdf['mean_residual'].values, (2, 98))

    # marker size scaled by the number of images observing each match point
    initial_gdf.plot(column='mean_residual',
                     ax=ax[0],
                     cmap='inferno',
                     vmin=clim[0],
                     vmax=clim[1],
                     legend=True,
                     s=initial_gdf['num_observations'] /
                     initial_gdf['num_observations'].max())

    final_gdf.plot(column='mean_residual',
                   ax=ax[1],
                   cmap='inferno',
                   vmin=clim[0],
                   vmax=clim[1],
                   legend=True,
                   s=final_gdf['num_observations'] /
                   final_gdf['num_observations'].max())

    ax[0].set_title('Before bundle adjustment (n=%i)' % initial_df.shape[0])
    ax[1].set_title('After bundle adjustment (n=%i)' % final_df.shape[0])
    ax[0].set_facecolor('0.5')
    ax[1].set_facecolor('0.5')

    if glacier_shape_fn:
        glacier_shape = gpd.read_file(glacier_shape_fn)
        # modern CRS keyword; the {'init': 'epsg:3857'} dict form is deprecated
        glacier_shape = glacier_shape.to_crs(epsg=3857)
        glacier_shape.plot(ax=ax[0], alpha=0.5)
        glacier_shape.plot(ax=ax[1], alpha=0.5)

    if not share_axes:
        # clamp whichever panel has the wider x-extent to the other panel's limits
        ax_0_xlim = ax[0].get_xlim()
        ax_1_xlim = ax[1].get_xlim()
        ax_0_max = abs(abs(ax_0_xlim[0]) - abs(ax_0_xlim[1]))
        ax_1_max = abs(abs(ax_1_xlim[0]) - abs(ax_1_xlim[1]))
        if ax_1_max > ax_0_max:
            ax[1].set_xlim(ax[0].get_xlim())
            ax[1].set_ylim(ax[0].get_ylim())
        else:
            ax[0].set_xlim(ax[1].get_xlim())
            ax[0].set_ylim(ax[1].get_ylim())

    if basemap == 'ctx':
        ctx.add_basemap(ax[0])
        ctx.add_basemap(ax[1])

    plt.suptitle("Match point mean residuals (m)")

    if out_dir_abs is not None:
        out = os.path.join(out_dir_abs,
                           'ba_match_residuals_before_and_after.jpg')
        # BUG FIX: matplotlib removed savefig's 'quality' kwarg; JPEG quality
        # now goes through pil_kwargs
        fig.savefig(out, dpi=300, bbox_inches="tight",
                    pil_kwargs={'quality': 85})
        plt.close()
    else:
        plt.show()
Beispiel #5
0
# choropleth of confirmed cases per district (column 'confirmed2' of gdf6;
# gdf6, lonlat_to_Mercator_ and gaodeurl are defined elsewhere in this file)
ax = gdf6.plot(column='confirmed2',
               cmap='Reds',
               legend=False,
               figsize=(16, 16),
               alpha=0.5,
               edgecolor='k')
# axis labels and title (Chinese: longitude / latitude /
# "Zhuhai epidemic map (confirmed cases)")
plt.xlabel('经度', fontsize=18)
plt.ylabel('纬度', fontsize=18)
plt.title("珠海市疫情地图(确诊数)", fontsize=24)
# label each district with its name and confirmed count, placed at the
# district's lng/lat converted to Mercator coordinates
for idx, row in gdf6.iterrows():
    x, y = lonlat_to_Mercator_(row.lng, row.lat)
    ax.text(x, y, s=row["name"]+str(row.confirmed), horizontalalignment='center',fontsize=16, \
            color="red",bbox={'facecolor': "none", 'alpha':0.8, 'pad': 2, 'edgecolor':'none'})
# add the basemap tiles ('gaodeurl' presumably an AMap/Gaode tile URL -- confirm)
ctx.add_basemap(ax, source=gaodeurl)
plt.show()

# testing the contextily package
# a single administrative region, but containing multiple polygons
gdzh = gpd.read_file("d:/temp/geojsonutf8/中华人民共和国/广东省/珠海市/440400.json",
                     encoding='utf-8')
gdzh = gdzh.to_crs(epsg=3857)
gdzh.plot(figsize=(16, 16))
plt.show()
# the four coordinate bounds look wrong -- why?  With a single region made of
# several polygons, .bounds is computed per row while .total_bounds spans all
# geometries (NOTE(review): original comment suspected only the first polygon
# was used -- verify)
gdzh["geometry"].bounds
gdzh["geometry"].total_bounds
# multiple administrative regions, multiple polygons
gdzh = gpd.read_file("d:/temp/geojsonutf8/中华人民共和国/广东省/珠海市/440400_full.json",
                     encoding='utf-8')
Beispiel #6
0
        ### plot GeoDataFrame with colors over a background map ###
        ## (fragment of a larger function; 'all_counts_uv' and 'polygon'
        ## are defined earlier in the enclosing scope)
        gdf = all_counts_uv
        import contextily as ctx

        # minx, miny, maxx, maxy = gdf.geometry.total_bounds
        # polygon.geometry.total_bounds

        ## reproject to web-mercator (the coordinate system of the basemap)
        gdf = gdf.to_crs(epsg=3857)
        # plot the data within the rectangular extent of 'polygon'
        fig, ax = plt.subplots(figsize=(10, 10))
        polygon = polygon.to_crs(epsg=3857)
        polygon.plot(alpha=0, color="white", edgecolor="black", ax=ax)
        # per-feature colours come from the precomputed 'color' column
        gdf.plot(ax=ax, alpha=1, color=gdf['color'])
        ctx.add_basemap(ax, source=ctx.providers.CartoDB.Positron)
        ax.set_axis_off()
        # plt.axis('equal')
        plt.show()

        # other available tile providers:
        # 'OpenStreetMap.Mapnik',
        # 'OpenTopoMap',
        #  'Stamen.Toner',
        #  'Stamen.TonerLite',
        #  'Stamen.Terrain',
        #  'Stamen.TerrainBackground',
        #  'Stamen.Watercolor',
        #  'NASAGIBS.ViirsEarthAtNight2012',
        #  'CartoDB.Positron',
        # 'CartoDB.Voyager'
Beispiel #7
0
def play_turn():
    """Play one turn of the powiat "war" simulation.

    Loads the powiat shapefiles, picks a conquering powiat (the powiat that
    conquered last turn continues with probability 0.4), conquers one
    neighbouring powiat, renders 'overall-map.png' and 'detail-map.png',
    writes the updated geometries back to 'map-data/powiaty.shp' and the new
    state to 'map-data/status.txt'.

    Returns:
        tuple: (message, powiaty_left, powiaty_ammount, powiaty_names) --
        the human-readable turn summary, the number of surviving owners,
        a dict mapping owner code -> number of powiaty held, and a dict
        mapping powiat code -> display name.
    """
    matplotlib.rcParams['hatch.linewidth'] = 3
    # silence chained-assignment warnings: this function writes via
    # powiaty['col'][mask] = ... in several places
    # NOTE(review): chained assignment is fragile -- prefer .loc[mask, 'col']
    pandas.set_option('mode.chained_assignment', None)

    months = [
        'January', 'February', 'March', 'April', 'May', 'June', 'July',
        'August', 'September', 'October', 'November', 'December'
    ]
    # two shapefiles: current owner geometries and the original powiat shapes;
    # merge them side by side on the row index
    powiaty = geopandas.read_file('map-data/powiaty.shp', encoding='utf-8')
    powiaty_shapes = geopandas.read_file('map-data/powiaty-shapes.shp',
                                         encoding='utf-8')
    powiaty = powiaty.merge(powiaty_shapes,
                            how='left',
                            left_index=True,
                            right_index=True)
    powiaty = powiaty.drop(columns='code_y')
    powiaty = powiaty.rename(columns={
        'code_x': 'code',
        'geometry_x': 'geometry',
        'geometry_y': 'powiat_shape'
    })
    powiaty = powiaty.set_geometry('geometry')

    # rebuild each owner's territory as the union of the original shapes of
    # all powiaty it currently holds; owners with no holdings get None
    for index, row in powiaty.iterrows():
        all_rows_for_powiat = powiaty[powiaty['belongs_to'] == row['code']]
        if (all_rows_for_powiat.empty):
            powiaty['geometry'][powiaty['code'] == row['code']] = None
        else:
            all_rows_for_powiat = all_rows_for_powiat.set_geometry(
                'powiat_shape')
            row_geometry = all_rows_for_powiat.unary_union
            powiaty['geometry'][powiaty['code'] == row['code']] = row_geometry

    # game state: remaining owner count, last conqueror's code, turn counter
    with open('map-data/status.txt', 'r') as f:
        powiaty_left = int(f.readline())
        last_powiat = f.readline().rstrip()
        date = int(f.readline())

    # turn counter maps to a month/year starting from 1999
    month = months[date % 12]
    year = 1999 + date // 12
    message = '{} {}'.format(month, year)

    # find a random powiat; its owner will be the conqueror this turn.
    # with probability 0.4 the previous conqueror picks from its own holdings
    # (unless there was no previous conqueror, encoded as '0')
    if last_powiat == '0' or random.random() < 0.6:
        random_powiat_row = powiaty.loc[[random.choice(powiaty.index)]]
    else:
        all_rows_for_conquering_powiat = powiaty[powiaty['belongs_to'] ==
                                                 last_powiat]
        random_powiat_row = all_rows_for_conquering_powiat.loc[[
            random.choice(all_rows_for_conquering_powiat.index)
        ]]

    random_powiat_code = random_powiat_row['code'].iloc[0]
    random_powiat_belongs_to = random_powiat_row['belongs_to'].iloc[0]
    conquering_powiat_row = powiaty[powiaty['code'] ==
                                    random_powiat_belongs_to]
    conquering_powiat_code = conquering_powiat_row['code'].iloc[0]
    conquering_powiat_value = conquering_powiat_row['value'].iloc[0]
    conquering_powiat_geometry = conquering_powiat_row['geometry'].iloc[0]
    # NOTE(review): lstrip('miasto ') strips any leading run of the characters
    # m/i/a/s/t/o/space, not just the literal 'miasto ' prefix -- names
    # starting with those letters would be mangled; str.removeprefix is safer
    conquering_powiat_name = conquering_powiat_row['name'].iloc[0].lstrip(
        'miasto ')

    # a neighbour is any powiat (held by another owner) whose original shape
    # touches the conqueror's current territory
    all_rows_for_conquering_powiat = powiaty[powiaty['belongs_to'] ==
                                             conquering_powiat_code]
    neighbours = []
    for index, row in powiaty.iterrows():
        if (row['belongs_to'] != conquering_powiat_code):
            if (row['powiat_shape'].touches(conquering_powiat_geometry)):
                neighbours.append(row['code'])

    powiat_to_conquer_code = random.choice(neighbours)
    powiat_to_conquer_row = powiaty[powiaty['code'] == powiat_to_conquer_code]
    powiat_to_conquer_geometry = powiat_to_conquer_row['powiat_shape'].iloc[0]
    powiat_to_conquer_owner_code = powiat_to_conquer_row['belongs_to'].iloc[0]
    powiat_to_conquer_name = powiat_to_conquer_row['name'].iloc[0].lstrip(
        'miasto ')

    # find row for conquered powiat owner
    powiat_to_conquer_owner_row = powiaty[powiaty['code'] ==
                                          powiat_to_conquer_owner_code]
    powiat_to_conquer_owner_name = powiat_to_conquer_owner_row['name'].iloc[
        0].lstrip('miasto ')
    powiat_to_conquer_owner_value = powiat_to_conquer_owner_row['value'].iloc[
        0]

    # update ownership of the conquered powiat
    powiaty['belongs_to'][powiaty['code'] ==
                          powiat_to_conquer_code] = conquering_powiat_code

    # mention the previous occupant only if the powiat was not self-owned
    if (powiat_to_conquer_code != powiat_to_conquer_owner_code):
        message = '{}, {} conquered {} previously occupied by {}.'.format(
            message, conquering_powiat_name, powiat_to_conquer_name,
            powiat_to_conquer_owner_name)
        log_info(message)
    else:
        message = '{}, {} conquered {}.'.format(message,
                                                conquering_powiat_name,
                                                powiat_to_conquer_name)
        log_info(message)

    # find all rows for conquered powiat owner and subtract the lost powiat
    # from the owner's territory
    all_rows_for_powiat_to_conquer_owner = powiaty[
        powiaty['belongs_to'] == powiat_to_conquer_owner_code]
    powiat_to_conquer_owner_geometry = powiat_to_conquer_owner_row[
        'geometry'].iloc[0]
    powiat_to_conquer_owner_geometry = powiat_to_conquer_owner_geometry.difference(
        powiat_to_conquer_geometry)
    powiat_to_conquer_row['geometry'].iloc[0] = powiat_to_conquer_geometry
    powiat_to_conquer_owner_row['geometry'].iloc[
        0] = powiat_to_conquer_owner_geometry
    powiaty['geometry'][
        powiaty['code'] ==
        powiat_to_conquer_owner_code] = powiat_to_conquer_owner_geometry

    # the old owner is eliminated if it holds no powiaty any more
    if (all_rows_for_powiat_to_conquer_owner.empty):
        info = '🦀 {} is gone 🦀'.format(powiat_to_conquer_owner_name)
        message = '{}\n{}'.format(message, info)
        log_info(info)
        powiaty_left -= 1

    info = '{} powiaty left.'.format(powiaty_left)
    message = '{}\n{}\nCheck the full map at: http://powiatwarbot.xyz/.'.format(
        message, info)
    log_info(info)

    #=== Plotting both maps ===

    cmap = plt.get_cmap('tab20')
    font_dict = {'fontfamily': 'Arial', 'fontsize': 32, 'fontweight': 'bold'}
    path_effects = [
        patheffects.Stroke(linewidth=4, foreground='black'),
        patheffects.Normal()
    ]
    texts = []
    fig, ax = plt.subplots(figsize=(20, 20))
    # NOTE(review): 'powiat_to_conquer' below is assigned but never used
    powiat_to_conquer = powiat_to_conquer_row.set_geometry('powiat_shape')
    powiat_to_conquer_owner_row = powiat_to_conquer_owner_row.set_geometry(
        'geometry')
    conquering_powiat_row = conquering_powiat_row.set_geometry('geometry')

    # get bbox for the detailed map by plotting the involved parties once,
    # reading the resulting limits, then clearing the axis
    conquering_powiat_row.plot(ax=ax)
    powiat_to_conquer_row.plot(ax=ax)
    if (not all_rows_for_powiat_to_conquer_owner.empty):
        powiat_to_conquer_owner_row.plot(ax=ax)

    x_limit = ax.get_xlim()
    y_limit = ax.get_ylim()
    ax.clear()
    ax.set_axis_off()
    ax.set_aspect('equal')
    powiaty_ammount = {}
    powiaty_names = {}

    # every powiat has to be plotted separately, otherwise it would have a
    # color from a normalized color map
    for i in range(len(powiaty)):
        row = powiaty.loc[[i], ]
        row_code = row['code'].iloc[0]
        row_name = row['name'].iloc[0].lstrip('miasto ')
        row_belongs_to = row['belongs_to'].iloc[0]

        # tally holdings per owner and remember display names
        powiaty_ammount.setdefault(row_belongs_to, 0)
        powiaty_ammount[row_belongs_to] = powiaty_ammount[row_belongs_to] + 1
        powiaty_names[row_code] = row_name

        # only surviving owners (with at least one holding) are drawn filled
        if (not powiaty[powiaty['belongs_to'] == row_code].empty):
            row.plot(ax=ax,
                     color=cmap(row['value']),
                     edgecolor='k',
                     linewidth=0.4)

    # dotted original powiat borders on top of the owner fills
    powiaty = powiaty.set_geometry('powiat_shape')
    powiaty.plot(ax=ax, color='none', dashes=':', edgecolor='k', linewidth=0.3)

    # highlight conqueror (green), target (hatched + red outline)
    conquering_powiat_row.plot(ax=ax,
                               color='none',
                               edgecolor='green',
                               linewidth=3)
    powiat_to_conquer_row.plot(ax=ax,
                               color=cmap(powiat_to_conquer_owner_value),
                               edgecolor=cmap(conquering_powiat_value),
                               hatch='///')
    powiat_to_conquer_row.plot(ax=ax,
                               color='none',
                               edgecolor='red',
                               linewidth=3)

    # draw text
    conquering_text = plt.text(
        s=conquering_powiat_name,
        x=conquering_powiat_row['geometry'].iloc[0].centroid.x,
        y=conquering_powiat_row['geometry'].iloc[0].centroid.y,
        fontdict=font_dict)
    to_conquer_text = plt.text(
        s=powiat_to_conquer_name,
        x=powiat_to_conquer_row['powiat_shape'].iloc[0].centroid.x,
        y=powiat_to_conquer_row['powiat_shape'].iloc[0].centroid.y,
        fontdict=font_dict)

    conquering_text.set_color('#9DFF9C')
    texts.append(conquering_text)
    to_conquer_text.set_color('#FF977A')
    texts.append(to_conquer_text)

    # old owner highlighted in blue (only if it still exists)
    if (not all_rows_for_powiat_to_conquer_owner.empty):
        powiat_to_conquer_owner_row.plot(ax=ax,
                                         color='none',
                                         edgecolor='blue',
                                         linewidth=3)
        to_conquer_owner_text = plt.text(
            s=powiat_to_conquer_owner_name,
            x=powiat_to_conquer_owner_row['geometry'].iloc[0].centroid.x,
            y=powiat_to_conquer_owner_row['geometry'].iloc[0].centroid.y,
            fontdict=font_dict)
        to_conquer_owner_text.set_color('#788CFF')
        texts.append(to_conquer_owner_text)

    # black outline behind all labels for readability
    for text in texts:
        text.set_path_effects(path_effects)

    adjust_text(texts,
                only_move={
                    'points': 'y',
                    'texts': 'y'
                },
                va='center',
                autoalign='y')
    contextily.add_basemap(ax,
                           source=contextily.sources.ST_TERRAIN_BACKGROUND,
                           zoom=8)
    plt.savefig('overall-map.png', transparent=True)

    # reset label positions before re-adjusting for the zoomed-in map
    conquering_text.set_position(
        (conquering_powiat_row['geometry'].iloc[0].centroid.x,
         conquering_powiat_row['geometry'].iloc[0].centroid.y))
    to_conquer_text.set_position(
        (powiat_to_conquer_row['powiat_shape'].iloc[0].centroid.x,
         powiat_to_conquer_row['powiat_shape'].iloc[0].centroid.y))

    if (not all_rows_for_powiat_to_conquer_owner.empty):
        to_conquer_owner_text.set_position(
            (powiat_to_conquer_owner_row['geometry'].iloc[0].centroid.x,
             powiat_to_conquer_owner_row['geometry'].iloc[0].centroid.y))

    # set bbox for detailed map (limits captured earlier)
    ax.set_xlim(x_limit)
    ax.set_ylim(y_limit)
    adjust_text(texts,
                only_move={
                    'points': 'y',
                    'texts': 'y'
                },
                va='center',
                autoalign='y')
    plt.savefig('detail-map.png', transparent=True)

    # finally, update geometry for conquering conquered powiat
    conquering_powiat_geometry = conquering_powiat_geometry.union(
        powiat_to_conquer_row['powiat_shape'].iloc[0])
    powiaty['geometry'][powiaty['code'] ==
                        conquering_powiat_code] = conquering_powiat_geometry
    # persist the updated owner geometries (without the static shapes column)
    powiaty = powiaty.set_geometry('geometry')
    powiaty = powiaty.drop(columns='powiat_shape')
    powiaty.to_file('map-data/powiaty.shp', encoding='utf-8')

    # write the new game state for the next turn
    with open('map-data/status.txt', 'w') as f:
        f.write('{}\n'.format(powiaty_left))
        f.write('{}\n'.format(conquering_powiat_code))
        f.write(str(date + 1))

    return message, powiaty_left, powiaty_ammount, powiaty_names
Beispiel #8
0
# BUG FIX: the original first line was a bare ``from`` -- a SyntaxError, most
# likely a truncated ``from ... import NetworkBuilder, MapBuilder`` statement.
# TODO(review): restore the real import before running this module.

import pandas as pd
import matplotlib.pyplot as plt
import contextily as ctx
import networkx as nx


class NetworkAnalyser:
    """Hold the nodes and edges loaded by NetworkBuilder."""

    def __init__(self):
        network_loader = NetworkBuilder()
        # BUG FIX: the original unpacked into locals and discarded the data;
        # store the nodes/edges on the instance so callers can use them
        self.nodes, self.edges = network_loader.nodes, network_loader.edges


# NOTE(review): 'nodes' and 'edges' are not defined at module scope here --
# they are locals of NetworkAnalyser.__init__; this fragment presumably ran
# in a notebook session where they existed.  TODO: confirm and wire up.
map_loader = MapBuilder()
geodata_nyc = map_loader.data_processed

# build a directed graph (pos=nodes below suggests 'nodes' maps
# node -> coordinates -- confirm)
G = nx.DiGraph()
G.add_nodes_from(nodes)
G.add_edges_from(edges)

# visualize the network over the NYC base geometry
fig, ax = plt.subplots(figsize=(30, 30))
plot_nyc = geodata_nyc.plot(alpha=0.5, edgecolor="k", ax=ax)
ctx.add_basemap(plot_nyc)
nx.draw_networkx(G, ax=ax, pos=nodes)

# NOTE(review): xmin/xmax/ymin/ymax are also undefined in this fragment --
# confirm where the intended extent comes from
plt.xlim((xmin, xmax))
plt.ylim((ymin, ymax))
plt.show()
# Then we can plot just one layer at a time
# NOTE(review): HUC6, point_df, gages_AZ and gages are defined elsewhere
# (earlier cells); this fragment assumes they are already in scope
fig, ax = plt.subplots(figsize=(5, 5))
HUC6.plot(ax=ax)
point_df.plot(ax=ax, color='red', marker='*')
ax.set_title("HUC Boundaries")
plt.show()

# %%
# To fix this we need to re-project everything into a common CRS
points_project = point_df.to_crs(gages_AZ.crs)

# Project the basins
HUC6_project = HUC6.to_crs(gages_AZ.crs)

# Adding a basemap
# ITS BEAUTIFUL
fig, ax = plt.subplots(figsize=(8, 10))
gages_AZ.plot(column='DRAIN_SQKM',
              categorical=False,
              legend=True,
              markersize=80,
              cmap='OrRd',
              ax=ax)
points_project.plot(ax=ax, color='red', marker='*', markersize=100)
HUC6_project.boundary.plot(ax=ax, color=None, edgecolor='black', linewidth=1)
# NOTE(review): uses 'gages' here but 'gages_AZ' everywhere else in this
# fragment -- confirm this is intentional
ctx.add_basemap(ax, crs=gages.crs)
ax.set_title("Stream gages relative to HUC Boundaries")
ax.set(ylim=[1000000, 1450000], xlim=[-1550000, -1300000])
# %%
import geopandas as gpd
import geoplot
import matplotlib.pyplot as plt
import contextily as ctx

# load last day's earthquakes and keep rows with a valid positive magnitude
df_quakes = gpd.read_file("lastday.json")
df_quakes = df_quakes[df_quakes["mag"] != "-"]
df_quakes["mag_num"] = df_quakes["mag"].astype(float)
df_quakes = df_quakes[df_quakes.mag_num > 0]

# fixed map extent in web-mercator metres (appears to cover Austria, given
# the BasemapAT note below -- confirm)
extent = (950000, 2000000, 5800000, 6300000)
# BUG FIX: GeoDataFrame.to_crs returns a new frame -- the original discarded
# the result, leaving the points in their source CRS while the axis extent
# is EPSG:3857
df_quakes = df_quakes.to_crs(epsg=3857)
# point size scaled by magnitude
ax = geoplot.pointplot(df_quakes,
                       color="red",
                       scale="mag_num",
                       limits=(0.5, 1.5))
ax.axis(extent)
# NOTE(review): Stamen tiles were retired in 2023; switch to e.g.
# ctx.providers.CartoDB.Positron if this provider no longer resolves
ctx.add_basemap(ax, source=ctx.providers.Stamen.TonerLite, zoom=6)
plt.show()

#source=ctx.providers.BasemapAT.grau
Beispiel #11
0
def fig_uo_coverage_oa_diff(
    lad20cd: str,
    uo_coverage: dict,
    theta: float,
    all_groups: dict,
    networks: dict,
    save_dir: Path,
):
    """Show the coverage difference of each output area between the Urban
    Observatory network and networks optimised for the coverage of single
    objectives (single population sub-groups). Figure name:
    urb_obs_coverage_difference_oa_theta_{theta}_nsensors_{n_sensors}.png

    Parameters
    ----------
    lad20cd : str
        Local authority code
    uo_coverage : dict
        Urban Observatory coverage results: the sensor count under
        "n_sensors" and, per objective name, an "oa_coverage" mapping
    theta : float
        Coverage distance to use
    all_groups : dict
        Short name (keys) and long title (values) for each objective
    networks : dict
        Previous optimisation results (e.g. from
        networks_single_obj.make_single_obj_networks)
    save_dir : Path
        Directory to save figure
    """
    fig, grid = get_fig_grid()
    cmap = get_diff_cmap()
    n_uo_oa = uo_coverage["n_sensors"]

    # The OA shapes and the colour limits are identical for every objective,
    # so compute them once rather than inside the loop.
    oa_shapes = get_oa_shapes(lad20cd)
    vmin = -1
    vmax = 1

    for i, (name, params) in enumerate(all_groups.items()):
        # Urban Observatory coverage for this objective, indexed by OA code.
        uo_cov = uo_coverage[name]["oa_coverage"]
        uo_cov = pd.DataFrame(uo_cov).set_index("oa11cd")
        uo_cov.rename(columns={"coverage": "urb_obs"}, inplace=True)

        # Coverage of the optimised (greedy) network with the same number of
        # sensors and the same theta.
        greedy_cov = networks[name][f"theta{theta}"][f"{n_uo_oa}sensors"][
            "oa_coverage"]
        greedy_cov = pd.DataFrame(greedy_cov).set_index("oa11cd")
        greedy_cov.rename(columns={"coverage": "greedy"}, inplace=True)

        # Positive diff => the optimised network covers this OA better.
        compare_nets = uo_cov.join(greedy_cov)
        compare_nets["diff"] = compare_nets["greedy"] - compare_nets["urb_obs"]

        shapes = oa_shapes.join(compare_nets["diff"])
        shapes.plot(column="diff",
                    alpha=0.85,
                    cmap=cmap,
                    ax=grid[i],
                    vmin=vmin,
                    vmax=vmax)

        ctx.add_basemap(
            grid[i],
            source="http://a.tile.stamen.com/toner/{z}/{x}/{y}.png",
            crs=oa_shapes.crs.to_epsg(),
        )

        grid[i].set_axis_off()
        grid[i].set_title(params["title"])

    add_scalebar(grid[1])
    add_colorbar(grid[-1],
                 cmap=cmap,
                 label="Coverage Difference",
                 vmin=vmin,
                 vmax=vmax)
    fig.suptitle(
        f"Comparisons with Urban Observatory Network (n = {n_uo_oa}, $\\theta$ = {theta} m)",
        y=0.87,
        fontsize=20,
    )

    t_str = f"theta_{theta}"
    n_str = f"nsensors_{n_uo_oa}"
    save_fig(fig, f"urb_obs_coverage_difference_oa_{t_str}_{n_str}.png",
             save_dir)
Beispiel #12
0
plt.rcParams['font.size'] = 12

import pandas as pd
# Air-quality monitoring sites within 100 km of Siheung (SH), 2019-2020.
data = pd.read_csv('AirKora_2019_2020_SH_100km.csv')

# SH

# Siheung boundary shapefile; reprojected to WGS84 lat/lon for plotting.
df = geopandas.read_file('D:\\OneDrive - SNU\\QGIS\\SH.shp')
df = df.to_crs(epsg=4326)

# Map window (lon/lat) around Siheung.
lon1, lon2, lat1, lat2 = 126.65, 126.9, 37.3, 37.5
extent = [lon1, lon2, lat1, lat2]

plt.figure()
ax = df.plot(figsize=(10, 10), alpha=1.0, edgecolor='r', facecolor='none')
# crs='epsg:4326' tells contextily the axes are lat/lon, not Web Mercator.
ctx.add_basemap(ax, zoom=13, crs='epsg:4326')
plt.plot(data['lon'],
         data['lat'],
         color='blue',
         marker='X',
         linestyle='None',
         markersize=10,
         label='Monitoring Site')
ax.set_xlim(lon1, lon2)
ax.set_ylim(lat1, lat2)
plt.show()

# Sudokwon

# Nationwide SIG (district) boundaries for the wider capital-area map.
df = geopandas.read_file(
    'D:\\OneDrive - SNU\\QGIS\\SIG_202101\\TL_SCCO_SIG.shp')
Beispiel #13
0
def map_mrt_temperature(database: SqliteUtil, kind: str):
    """Render one map per time index showing MRT-derived link temperatures.

    Loads the global temperature extrema and network links, builds one
    temperature profile per link, and saves a basemap-backed PNG for each
    time index to ``result/mrt_temperatures/<idx>.png``.

    Parameters
    ----------
    database : SqliteUtil
        Open database handle exposing ``cursor`` and ``fetch_rows``.
    kind : str
        Temperature column of ``mrt_temperatures`` to plot.
        NOTE(review): interpolated directly into the SQL below -- must come
        from trusted code, never from user input.
    """
    log.info('Profiling temperature extrema.')
    max_temp, min_temp, max_idx, min_idx = load_extrema(database, kind)

    log.info('Loading network links.')
    links = load_links(database)

    os.makedirs('result/mrt_temperatures/', exist_ok=True)

    log.info('Loading temperatures.')
    # One profile per temperature id; slots default to None for missing idxs.
    temps = defaultdict(lambda: [None] * (max_idx - min_idx + 1))
    query = f'''
        SELECT
            temperature_id,
            temperature_idx,
            {kind}
        FROM mrt_temperatures;
    '''
    database.cursor.execute(query)
    rows = database.fetch_rows()
    rows = counter(rows, 'Loading temperature profile %s.')

    for uuid, idx, temp in rows:
        temps[uuid][idx - min_idx] = temp

    def generate():
        # One row per link: id, its full temperature profile, and geometry.
        for link in links:
            temp = temps[link.profile]
            yield (link.id, *temp, link.line)

    log.info('Forming dataframes.')

    cols = [f'temp_{idx}' for idx in range(min_idx, max_idx + 1)]
    df = pd.DataFrame(generate(), columns=('id', *cols, 'line'))
    df['line'] = gpd.GeoSeries(df['line'], crs='EPSG:2223')
    gpdf = gpd.GeoDataFrame(df, geometry='line', crs='EPSG:2223')
    # Web Mercator so the contextily tiles line up.
    gpdf = gpdf.to_crs(epsg=3857)

    # Free the large intermediates before the rendering loop.
    del links, temps, df

    for idx in range(min_idx, max_idx + 1):
        fig, ax = plt.subplots(1, figsize=(20, 12))

        log.info('Plotting network visual.')
        plot = gpdf.plot(column=f'temp_{idx}', cmap='YlOrRd', linewidth=0.5,
            ax=ax, alpha=1)

        ax.set_title(f'Maricopa {kind.upper()} Temperatures {idx_to_hhmm(idx)}',
            fontdict={'fontsize': '18', 'fontweight' : '3'})

        ctx.add_basemap(plot, source=ctx.providers.Stamen.TonerLite)

        # Colorbar spans the global extrema so all frames are comparable.
        sm = plt.cm.ScalarMappable(cmap='YlOrRd',
            norm=plt.Normalize(vmin=min_temp, vmax=max_temp))
        sm._A = []
        cbar = fig.colorbar(sm)

        log.info('Saving map.')
        # BUG FIX: previously wrote to 'result/mrt_temperatures1/', a
        # directory never created above, which raised FileNotFoundError.
        fig.savefig(f'result/mrt_temperatures/{idx}.png', bbox_inches='tight')

        plt.clf()
        plt.close()
Beispiel #14
0
# Axis label and title (Chinese UI strings kept verbatim).
plt.ylabel('纬度',fontsize=18)    
plt.title("珠海市各区分局税务登记统计(单位纳税人)",fontsize=24)
# Overlay the No.1 branch layer with high transparency so the original
# colours of the covered Xiangzhou, Gaoxin, Bonded-zone and Wanshan
# branches remain visible underneath.
gdf4.plot(ax=ax,color="blue",alpha=0.3)
# Annotate the data
for idx, row in gdf3.iterrows():
    # Label each district branch with its name and count
    ax.text(row.lng, row.lat, s=str(row.fjNum+row.branch)+"\n"+str(row.DW), horizontalalignment='center', \
      color="blue",fontsize=14,bbox={'facecolor': "none", 'alpha':0.5, 'pad': 2, 'edgecolor':'none'})
    # Mark the branch office location
    ax.scatter(row.lng_dz,row.lat_dz,marker="o",s=15,c="green")
    # Label the office with its number so it matches the name/count above
    ax.text(row.lng_dz, row.lat_dz, s=str(row.fjNum), color="green",fontsize=10)  
# Tianditu Chinese annotation, approach 1
# Add the basemap (PNG tiles); the basemap must be drawn first
ctx.add_basemap(ax,source=url)
# Add the Chinese label layer (PNG); non-label pixels are transparent so the
# basemap underneath stays visible
ctx.add_basemap(ax,source=url2)
plt.show()   

# Tianditu Chinese annotation, approach 2 (kept for reference)
 # xmin, xmax, ymin, ymax = ax.axis()
# from PIL import Image
# img, bbox = ctx.bounds2img(xmin,ymin,xmax,ymax,source = url)
# img2, bbox2 = ctx.bounds2img(xmin,ymin,xmax,ymax,source = url2)
# img1 = Image.fromarray(img, 'RGBA')
# ax.imshow(img1, extent=bbox)
# ax.axis((xmin, xmax, ymin, ymax))
# img3 = Image.fromarray(img2, 'RGBA')
# ax.imshow(img3, extent=bbox2)
# ax.axis((xmin, xmax, ymin, ymax))
Beispiel #15
0
def history_events_pdf():
    """Flask endpoint: build a PDF report of a user's historic + incident
    events, including a map of the event track, and send it as a download.

    Reads filter parameters from the request query string, merges historic
    and incident events, localises/sorts by entry date, translates column
    headers and event names to the user's locale, renders the track on a
    basemap, then converts an HTML table + map image into a PDF.
    """
    print('hello reports')
    req = request.args.to_dict(flat=True)
    user = query_user(req)
    data_historics = query_historics(request.args.to_dict(flat=True))
    data_incidents = query_incidents(request.args.to_dict(flat=True))
    data_report = pd.concat([data_historics, data_incidents])
    data_report = data_report.sort_index()
    # NOTE(review): this is an alias, not a copy -- the geometry columns
    # added below mutate the same underlying frame as data_report.
    data_geometry = data_report
    data_report['DATE_ENTRY'] = pd.to_datetime(data_report['DATE_ENTRY'])

    data_report.sort_values('DATE_ENTRY', inplace=True, ascending=False)
    data_report['DATE_ENTRY'] = data_report['DATE_ENTRY'].dt.tz_localize('GMT')
    # Columns exported to the report, in display order.
    column_keys = [
        'PLATE', 'INTERNAL_CODE', 'DATE_ENTRY', 'EVENT_NAME', 'VALUE',
        'ADDRESS', 'X', 'Y', 'SPEED', 'ORIENTATION', 'BATTERY', 'SHEET',
        'PERMANENCE'
    ]
    # Vocabulary handed to translate(): every event name plus every column key.
    translaters = data_report['EVENT_NAME'].unique().tolist() + [
        x.lower() for x in column_keys
    ]
    data_report['EVENT_NAME'] = data_report['EVENT_NAME'].apply(
        lambda x: translate(x, translaters, user.locale))

    column_names = []

    for c in column_keys:
        column_names.append(translate(c.lower(), translaters, user.locale))

    data_report = data_report[column_keys]
    data_report.columns = column_names

    owner_id = str(user.owner_id)
    full_path = os.path.dirname(os.path.abspath(__file__))

    #data_report.to_csv(full_path+'/history_events'+owner_id+'.csv', index=False)
    # Build point geometries from the X/Y columns (presumably lon/lat, given
    # the EPSG:4326 CRS below -- confirm against the data source).
    data_geometry['X'] = data_geometry['X'].astype(float)
    data_geometry['Y'] = data_geometry['Y'].astype(float)

    data_geometry['GEOMETRY'] = data_geometry[['X', 'Y']].values.tolist()

    data_geometry['GEOMETRY'] = data_geometry['GEOMETRY'].apply(Point)
    data_geometry = geopandas.GeoDataFrame(data_geometry,
                                           geometry='GEOMETRY',
                                           crs='EPSG:4326')
    print(data_geometry.head(2))
    dfx = data_geometry

    # One LineString (track) per vehicle plate.
    geo_df = dfx.groupby(
        ['PLATE'])['GEOMETRY'].apply(lambda x: LineString(x.tolist()))
    geo_df = geopandas.GeoDataFrame(geo_df, geometry='GEOMETRY')

    fig, myax = plt.subplots(figsize=(8, 8))
    # Track in translucent black, all points in yellow, first point (most
    # recent, after the descending sort) in red, last point in green.
    geo_df.plot(ax=myax,
                color='black',
                linestyle='-',
                marker='o',
                linewidth=3,
                alpha=0.3)
    data_geometry.plot(ax=myax, color='yellow', marker='o')

    data_geometry.head(1).plot(ax=myax,
                               color='red',
                               marker='x',
                               markersize=400)
    data_geometry.tail(1).plot(ax=myax,
                               color='green',
                               marker='x',
                               markersize=400)

    minx, miny, maxx, maxy = data_geometry.total_bounds

    myax.set_xlim(
        minx - .1, maxx + .1
    )  # added/substracted value is to give some margin around total bounds
    myax.set_ylim(miny - .1, maxy + .1)
    ctx.add_basemap(myax, crs=data_geometry.crs.to_string())

    # Annotate each point with its PERMANENCE value when present.
    for i, (x, y, label) in enumerate(
            zip(data_geometry.geometry.x, data_geometry.geometry.y,
                data_geometry.PERMANENCE)):
        if str(label).strip() != 'nan':
            myax.annotate(label,
                          xy=(x, y),
                          xytext=(-15, 1),
                          textcoords="offset points")

    myax.set_title("WGS84 (lat/lon)")
    myax.get_figure().savefig('history_events' + owner_id + '.png')
    # HTML table + map image -> PDF bytes -> file -> download response.
    table = generate_html(data_report, 'history_events' + owner_id + '.png')
    ret = generate_pdf(table)
    with open('history_events' + owner_id + '.pdf', 'wb') as w:
        w.write(ret)

    return send_from_directory(full_path,
                               'history_events' + owner_id + '.pdf',
                               as_attachment=True)
Beispiel #16
0
# Distance from each grid cell's centroid to the nearest bike station.
print("Calculating distances")
grid_small["distance"] = grid_small.apply(
    lambda row: get_nearest(row.geometry.centroid, station_points), axis=1
)
print("Distances done")

# Reproject both layers to Web Mercator so they align with the basemap tiles.
grid_small = grid_small.to_crs(epsg=3857)
stations = stations.to_crs(epsg=3857)
# Classify distances into three bins; 250/800 are in CRS units (metres),
# presumably corresponding to the <3min / 3-8min / >8min legend -- confirm.
classifier = mapclassify.UserDefined.make(bins=[250, 800])
grid_small["min"] = grid_small[["distance"]].apply(classifier)

print("Plotting the map")
ax = grid_small.plot(column="min", cmap="RdYlBu", linewidth=0, alpha=0.6,
                     figsize=(30, 20))
stations.plot(ax=ax, markersize=3, color="black")
ctx.add_basemap(ax, source=ctx.providers.OpenStreetMap.Mapnik)

# Manual legend; patch colours must match the 'RdYlBu' classes above.
LegendElement = [
    mpatches.Patch(color='#A50026', label='<3min'),
    mpatches.Patch(color='#FFF7B3', label='3-8min'),
    mpatches.Patch(color='#323896', label='>8min'),
    Line2D([0], [0], marker='o', color='black', label='Bike station',
           markerfacecolor='g', markersize=3)
]

ax.legend(handles=LegendElement)
# Crop the map to the grid extent
minx, miny, maxx, maxy = grid_small.total_bounds
ax.set_xlim(minx, maxx)
ax.set_ylim(miny, maxy)
Beispiel #17
0
def test_add_basemap():
    """Integration test for contextily's add_basemap (old, pre-1.0 API:
    ``url=`` keyword, add_basemap returning the axis, RGB tiles).

    Covers: a web basemap, a local raster source, auto-zoom, on-the-fly
    warping via ``crs=``, and local-source warping. Asserts exact pixel
    sums/means, so it is pinned to a specific tile provider snapshot.
    """
    # Plot boulder bbox as in test_place
    x1, x2, y1, y2 = [
        -11740727.544603072,
        -11701591.786121061,
        4852834.0517692715,
        4891969.810251278,
    ]

    # Test web basemap
    fig, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ax = ctx.add_basemap(ax, zoom=10)

    # ensure add_basemap did not change the axis limits of ax
    ax_extent = (x1, x2, y1, y2)
    assert ax.axis() == ax_extent

    assert ax.images[0].get_array().sum() == 75853866
    assert ax.images[0].get_array().shape == (256, 512, 3)
    assert_array_almost_equal(ax.images[0].get_array().mean(),
                              192.90635681152344)

    # Test local source (raster file written by ctx.Place)
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    loc = ctx.Place(SEARCH, path="./test2.tif", zoom_adjust=ADJUST)
    ax = ctx.add_basemap(ax, url="./test2.tif")

    raster_extent = (
        -11740803.981631357,
        -11701668.223149346,
        4852910.488797557,
        4892046.247279563,
    )
    assert_array_almost_equal(raster_extent, ax.images[0].get_extent())
    assert ax.images[0].get_array().sum() == 34840247
    assert ax.images[0].get_array().shape == (256, 256, 3)
    assert_array_almost_equal(ax.images[0].get_array().mean(),
                              177.20665995279947)

    # Test with auto-zoom
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ax = ctx.add_basemap(ax, zoom="auto")

    ax_extent = (
        -11740727.544603072,
        -11691807.846500559,
        4852834.0517692715,
        4891969.810251278,
    )
    assert_array_almost_equal(ax_extent, ax.images[0].get_extent())
    assert ax.images[0].get_array().sum() == 723918764
    assert ax.images[0].get_array().shape == (1024, 1280, 3)
    assert_array_almost_equal(ax.images[0].get_array().mean(),
                              184.10206197102863)

    # Test on-the-fly warping (axes in lat/lon, tiles warped to match)
    x1, x2 = -105.5, -105.00
    y1, y2 = 39.56, 40.13
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ax = ctx.add_basemap(ax, crs={"init": "epsg:4326"}, attribution=None)
    assert ax.get_xlim() == (x1, x2)
    assert ax.get_ylim() == (y1, y2)
    assert ax.images[0].get_array().sum() == 724238693
    assert ax.images[0].get_array().shape == (1135, 1183, 3)
    assert_array_almost_equal(ax.images[0].get_array().mean(),
                              179.79593258881636)
    # Test local source warping
    _ = ctx.bounds2raster(x1, y1, x2, y2, "./test2.tif", ll=True)
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ax = ctx.add_basemap(ax,
                         url="./test2.tif",
                         crs={"init": "epsg:4326"},
                         attribution=None)
    assert ax.get_xlim() == (x1, x2)
    assert ax.get_ylim() == (y1, y2)
    assert ax.images[0].get_array().sum() == 724238693
    assert ax.images[0].get_array().shape == (1135, 1183, 3)
    assert_array_almost_equal(ax.images[0].get_array().mean(),
                              179.79593258881636)
 def _add_basemap(self, ax):
     """Draw a contextily tile basemap on *ax*, honouring ``self.zoom`` when set."""
     kwargs = {"zoom": self.zoom} if self.zoom else {}
     ctx.add_basemap(ax, **kwargs)
# NOTE(review): `AZ_rivers` and `basins_project` are defined elsewhere, and
# `gages_AZ` is used here before being (re)assigned below -- this fragment
# presumably relies on an earlier cell having defined it.
rivers_project = AZ_rivers.to_crs(gages_AZ.crs)

# %%
# Plotting the map

# Zoom  in and just look at AZ
gages.columns
gages.STATE.unique()
gages_AZ = gages[gages['STATE'] == 'AZ']
gages_AZ.shape

# Plotting
fig, ax = plt.subplots(figsize=(5, 10))

ax.set_title('Verde River Basin', fontsize=15)

basins_project.plot(ax=ax,
                    label='Basins',
                    edgecolor='black',
                    alpha=0.3,
                    legend=True)
gages_AZ.plot(ax=ax, label='Gauges', marker='*', color='green')
rivers_project.plot(ax=ax, label='Verde River', color='blue')
ax.legend()
# NOTE(review): no crs= passed, so add_basemap assumes Web Mercator; confirm
# the gages CRS matches or the tiles will be misaligned.
ctx.add_basemap(ax)

plt.show()
fig.savefig('Salcedo_map.png')

# %%
Beispiel #20
0
def plot_exposure_ss(exposures, point=None):
    """Plot San Salvador exposure points on a Mercator map with a basemap.

    If *point* is given, highlight that single exposure with a large black
    '+'; otherwise scatter all exposures, split into AUP housing
    (category 1, '+' markers) and non-AUP housing (category 2, 'o' markers),
    each with its own discrete value colormap and legend.

    NOTE(review): ``ctx.sources.OSM_C`` and the ``url=`` keyword are from the
    old (pre-1.0) contextily API; newer versions use ``source=``.

    Parameters: exposures -- GeoDataFrame-like with .geometry, .value and
    .category; point -- optional positional index of the point to show.
    Returns the created matplotlib figure.
    """
    if point is not None:
        fig, ax = plt.subplots(figsize=(15, 15),
                               subplot_kw=dict(projection=ccrs.Mercator()))
        ax.scatter(exposures[point:point + 1].geometry[:].x,
                   exposures[point:point + 1].geometry[:].y,
                   c='k',
                   marker='+',
                   s=800)
        # Fixed window (Web Mercator metres) around the area of interest.
        ax.set_xlim(-9931038.907412536, -9926684.253858147)
        ax.set_ylim(1536680.51725147, 1539512.429812354)
    else:
        # create new map for viviendas (non-AUP housing): 4 discrete colours
        cmap_viv = plt.cm.get_cmap('autumn', 4)
        cmap_viv = mpl.colors.LinearSegmentedColormap.from_list(
            'Custom cmap', [cmap_viv(i) for i in range(cmap_viv.N)],
            cmap_viv.N)
        # create new map for aups (AUP housing): 4 discrete colours
        cmap_aup = plt.cm.get_cmap('winter', 4)
        cmap_aup = mpl.colors.LinearSegmentedColormap.from_list(
            'Custom cmap', [cmap_aup(i) for i in range(cmap_aup.N)],
            cmap_aup.N)

        # define the value bins and normalize each colormap to them
        bounds_aup = np.array([6000, 8800, 10000, 12000, 14600])
        norm_aup = mpl.colors.BoundaryNorm(bounds_aup, cmap_aup.N)
        bounds_viv = np.array([7500, 11000, 16500, 33000, 56300])
        norm_viv = mpl.colors.BoundaryNorm(bounds_viv, cmap_viv.N)

        exp_merc_aup = exposures[exposures.category == 1]
        exp_merc_house = exposures[exposures.category == 2]

        fig, ax = plt.subplots(figsize=(15, 15),
                               subplot_kw=dict(projection=ccrs.Mercator()))
        clr_1 = ax.scatter(exp_merc_aup.geometry[:].x,
                           exp_merc_aup.geometry[:].y,
                           c=exp_merc_aup.value.values,
                           marker='+',
                           s=25,
                           cmap=cmap_aup,
                           norm=norm_aup)
        clr_2 = ax.scatter(exp_merc_house.geometry[:].x,
                           exp_merc_house.geometry[:].y,
                           c=exp_merc_house.value.values,
                           marker='o',
                           s=8,
                           cmap=cmap_viv,
                           norm=norm_viv)

        # Hand-built legend for the non-AUP bins (one proxy line per bin).
        lines_legend = []
        text_legend = []
        for i_col, x_col in enumerate(np.linspace(0, 1, 4)):
            lines_legend.append(
                mpl.lines.Line2D(range(1),
                                 range(1),
                                 color='white',
                                 marker='o',
                                 markerfacecolor=cmap_viv(x_col)))
            text_legend.append(
                str(bounds_viv[i_col]) + ' - ' + str(bounds_viv[i_col + 1]))
        legend1 = plt.legend(lines_legend,
                             text_legend,
                             numpoints=1,
                             loc=4,
                             title='no AUP housing')

        # Second legend for the AUP bins; added alongside the first below.
        lines_legend = []
        text_legend = []
        for i_col, x_col in enumerate(np.linspace(0, 1, 4)):
            lines_legend.append(
                mpl.lines.Line2D(range(1),
                                 range(1),
                                 color=cmap_aup(x_col),
                                 marker='+',
                                 markerfacecolor=cmap_aup(x_col)))
            text_legend.append(
                str(bounds_aup[i_col]) + ' - ' + str(bounds_aup[i_col + 1]))
        plt.legend(lines_legend,
                   text_legend,
                   numpoints=1,
                   loc=3,
                   title='AUP housing')
        plt.gca().add_artist(legend1)

    ctx.add_basemap(ax, zoom=15, url=ctx.sources.OSM_C, origin='upper')
    scale_bar(ax, 0.5, location=(0.93, 0.4), linewidth=2)
    # Red rectangle -- presumably marks the single-point zoom window used in
    # the `point is not None` branch above; confirm the coordinates match.
    rect = patches.Rectangle((-9931033.307412536, 1536686.51725147),
                             4345.053554389253,
                             2934.0125608841423,
                             linewidth=2,
                             edgecolor='r',
                             facecolor='none',
                             zorder=200)
    ax.add_patch(rect)
    ax.set_axis_off()
    if point is not None:
        # fig.savefig('ss_point_'+str(point)+'.png', format='png', bbox_inches='tight')
        return fig
    else:
        # fig.savefig('ss_points.png', format='png', bbox_inches='tight')
        return fig
Beispiel #21
0
# Name the Series so it becomes the 'sale_counts' column after the concat below.
pc_count.name='sale_counts'

# %% [markdown]
# To plot a choropleth, we use Geopandas, a geospatial software that is an extension of pandas. Geopandas accepts most spatial filetypes as input to generate a basic mapping which we can attach relevant data to. In this case, I generated a polygon shapefile using QGIS based on the sector code. Look out for a future tutorial on how to generate shapefiles using QGIS!
#
# Next, add the total sale counts from each sector to their respective polygon features. Since geopandas functions similarly to pandas, we can use functions like pd.concat to concatenate the data. Once added, plot the shapefile with the simple `.plot()` command. To add a baselayer to the choropleth, install `contextily`. The package automatically pulls rasterized images from openstreetmap.
#

# %% Plot
shapefile = Path('/home/jovyan/shared-notebooks/notebooks/HDB_analysis/data/processed/sectorcode/sectorcode.shp')
sgshp = geopandas.read_file(shapefile)
# Web Mercator (EPSG:3857) so the contextily tiles line up.
sgshp_3857 = sgshp.to_crs(epsg=3857)
# Attach per-sector sale counts to the polygons (index-aligned concat).
sale_counts_shp = pd.concat([sgshp_3857.set_index('sectorcode'),pc_count],axis=1)

plt.close('all')
fig = plt.figure(figsize=(30, 10))
ax = fig.add_subplot()
# Sectors without sales are drawn light grey and mostly transparent.
sale_counts_shp.plot(ax=ax,
                     column='sale_counts',
                     alpha=0.9,
                     edgecolor='k',
                     missing_kwds={'color': 'lightgrey', 'alpha':0.1},
                     legend=True,
                     )
ctx.add_basemap(ax=ax, source=ctx.providers.Stamen.TonerLite)
ax.set_axis_off()
ax.set_title('Number of resale transactions from 2019-01 to 2020-03')

# %% [markdown]
# Recent sales are generally concentrated in areas with many apartments that reached MOP. 
Beispiel #22
0
def generate_image(street: T_Series) -> typing.Tuple[T_Figure, T_Axis]:
    """Render the summer land-surface-temperature figure for one street.

    Draws the city-wide choropleth, an inset zooming on *street*, a polygon
    linking the inset to its true map location, explanatory text, and
    basemaps for both axes.

    NOTE(review): relies on module-level names defined elsewhere in this
    file: gdf_merged, column, gdf_roads, gdf_roads_nodata, inset_position,
    inset_linewidth, margin, basemap, inset_basemap, make_square,
    address_range_from_tlid.

    Returns (figure, axis) of the main plot.
    """
    # forcing the aspect ratio slightly resizes the inset. i'm not sure how to
    # get the exact figure positions of the inset after resize, so a correction is
    # manually determined by trial and error and then applied.
    inset_adjust = 0.01

    # initialize colors
    colormap = cm.get_cmap("magma", 12)
    normalize = colors.Normalize(vmin=gdf_merged[column].min(),
                                 vmax=gdf_merged[column].max())

    # initialize figure
    (figure, axis) = pyplot.subplots(figsize=(14, 7))

    # colorbar
    colorbar = figure.colorbar(cm.ScalarMappable(norm=normalize,
                                                 cmap=colormap),
                               ax=axis)
    colorbar.set_label("Summer land surface temperature 2002-2008 [°F]",
                       rotation=270,
                       labelpad=10)

    # main plot
    gdf_merged.plot(ax=axis, column=column, cmap=colormap, legend=False)

    # create extra room on the right for the inset
    xlim = axis.get_xlim()
    axis.set_xlim([xlim[0], xlim[1] + 2e4])

    # inset plot (a single-row GeoDataFrame built from the street Series)
    inset = axis.inset_axes(inset_position)
    gdf_plot = geopandas.GeoDataFrame(street).T
    gdf_plot.crs = gdf_roads.crs
    gdf_plot.plot(ax=inset,
                  color=colormap(normalize(street[column])),
                  linewidth=inset_linewidth,
                  zorder=2)
    inset.margins(x=margin, y=margin)

    # roads for which we have no data. we need to disable autoscale first because
    # these functions contain data far from the road in question
    inset.autoscale(False)
    gdf_merged.plot(ax=inset,
                    column=column,
                    cmap=colormap,
                    alpha=0.3,
                    linewidth=inset_linewidth,
                    zorder=1)
    gdf_roads_nodata.plot(ax=inset,
                          color="black",
                          linestyle="--",
                          alpha=0.3,
                          linewidth=4,
                          zorder=1,
                          label="no data")
    inset.legend()

    # set inset to be equal ratio (other plotting functions can change this, so we
    # run it after all plotting functions that interact with the inset). we also
    # need to update the xlim and ylim at this point
    make_square(inset)
    inset_xlim = inset.get_xlim()
    inset_ylim = inset.get_ylim()

    # rectangle showing the location of the inset
    # transformer credit: https://stackoverflow.com/a/40475221
    axis_to_data = axis.transAxes + axis.transData.inverted()
    inset_coords_bottomleft = (inset_xlim[0], inset_ylim[0])
    inset_coords_topleft = (inset_xlim[0], inset_ylim[1])
    inset_pos_bottomleft = axis_to_data.transform(
        (inset_position[0] + inset_adjust, inset_position[1]))
    inset_pos_topleft = axis_to_data.transform(
        (inset_position[0] + inset_adjust,
         inset_position[1] + inset_position[3]))
    gdf_inset_location = geopandas.GeoDataFrame([{
        "geometry":
        shapely.geometry.Polygon([
            inset_coords_bottomleft, inset_coords_topleft, inset_pos_topleft,
            inset_pos_bottomleft
        ])
    }])
    gdf_inset_location.plot(ax=axis, color="black", zorder=3, alpha=0.5)

    # remove all coordinate ticks
    axis.set_xticks([])
    axis.set_yticks([])
    inset.set_xticks([])
    inset.set_yticks([])

    # text (coordinates are fixed map positions in the main axis' CRS)
    axis.text(-7892500,
              5220000,
              "The average land surface temperature for",
              fontsize=12,
              horizontalalignment="center")
    axis.text(-7892500,
              5218400,
              "{house_numbers} {street_name}".format(
                  house_numbers=address_range_from_tlid(street["TLID"]),
                  street_name=street["FULLNAM"]),
              fontsize=22,
              horizontalalignment="center")
    axis.text(-7892500,
              5217000,
              "was",
              fontsize=15,
              horizontalalignment="center")
    axis.text(-7892500,
              5214500,
              "{:.1f}°F".format(street["LST_weighted"]),
              fontsize=40,
              horizontalalignment="center")
    axis.text(-7892500,
              5213000,
              "in the summers from 2002-2008",
              fontsize=15,
              horizontalalignment="center")

    # basemaps
    contextily.add_basemap(ax=axis, source=basemap)
    contextily.add_basemap(ax=inset, source=inset_basemap)

    # the above basemap code may have resized the inset
    inset.set_xlim(inset_xlim)

    pyplot.tight_layout()

    return (figure, axis)
Beispiel #23
0
def test_add_basemap():
    """Integration test for contextily's add_basemap (newer API: ``source=``,
    RGBA tiles, ``reset_extent``).

    Covers: a web basemap, windowed and full local-raster reads, auto-zoom,
    and on-the-fly warping via ``crs=``. Asserts exact pixel sums/means, so
    it is pinned to a specific tile provider snapshot.
    """
    # Plot boulder bbox as in test_place
    x1, x2, y1, y2 = [
        -11740727.544603072,
        -11701591.786121061,
        4852834.0517692715,
        4891969.810251278,
    ]

    # Test web basemap
    fig, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ctx.add_basemap(ax, zoom=10)

    # ensure add_basemap did not change the axis limits of ax
    ax_extent = (x1, x2, y1, y2)
    assert ax.axis() == ax_extent

    assert ax.images[0].get_array().sum() == 51551927
    assert ax.images[0].get_array().shape == (256, 256, 4)
    assert_array_almost_equal(ax.images[0].get_array()[:,:,:3].mean(), 177.20665995279947)
    assert_array_almost_equal(ax.images[0].get_array().mean(), 196.654995)

    # Test local source
    ## Windowed read: reset_extent=True crops the raster to the axis window
    subset = (
        -11730803.981631357,
        -11711668.223149346,
        4862910.488797557,
        4882046.247279563,
    )

    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(subset[0], subset[1])
    ax.set_ylim(subset[2], subset[3])
    loc = ctx.Place(SEARCH, path="./test2.tif", zoom_adjust=ADJUST)
    ctx.add_basemap(ax, source="./test2.tif", reset_extent=True)

    assert_array_almost_equal(subset, ax.images[0].get_extent())
    assert ax.images[0].get_array().sum() == 3187219
    assert ax.images[0].get_array()[:,:,:3].sum() == 2175124
    assert ax.images[0].get_array().shape == (64, 64, 4)
    assert_array_almost_equal(ax.images[0].get_array()[:,:,:3].mean(), 177.01204427083334)
    assert_array_almost_equal(ax.images[0].get_array().mean(), 194.53240966796875)
    ## Full read: reset_extent=False keeps the raster's native extent
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    loc = ctx.Place(SEARCH, path="./test2.tif", zoom_adjust=ADJUST)
    ctx.add_basemap(ax, source="./test2.tif", reset_extent=False)

    raster_extent = (
        -11740880.418659642,
        -11662608.901695622,
        4774715.408861821,
        4931258.442789858,
    )
    assert_array_almost_equal(raster_extent, ax.images[0].get_extent())
    assert ax.images[0].get_array()[:,:,:3].sum() == 76248416
    assert ax.images[0].get_array().sum() == 109671776
    assert ax.images[0].get_array().shape == (512, 256, 4)
    assert_array_almost_equal(ax.images[0].get_array()[:,:,:3].mean(), 193.90974934895834)
    assert_array_almost_equal(ax.images[0].get_array().mean(), 209.18231201171875)

    # Test with auto-zoom
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ctx.add_basemap(ax, zoom="auto")

    ax_extent = (
        -11740727.544603072,
        -11701591.786121061,
        4852834.051769271,
        4891969.810251278,
    )
    assert_array_almost_equal(ax_extent, ax.images[0].get_extent())
    assert ax.images[0].get_array()[:,:,:3].sum() == 563185119
    assert ax.images[0].get_array().sum() == 830571999
    assert ax.images[0].get_array().shape == (1024, 1024, 4)
    assert_array_almost_equal(ax.images[0].get_array()[:,:,:3].mean(), 179.03172779083252)
    assert_array_almost_equal(ax.images[0].get_array().mean(), 198.023796)

    # Test on-the-fly warping (axes in lat/lon, tiles warped to match)
    x1, x2 = -105.5, -105.00
    y1, y2 = 39.56, 40.13
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ctx.add_basemap(ax, crs={"init": "epsg:4326"}, attribution=None)
    assert ax.get_xlim() == (x1, x2)
    assert ax.get_ylim() == (y1, y2)
    assert ax.images[0].get_array()[:,:,:3].sum() == 724238693
    assert ax.images[0].get_array().shape == (1135, 1183, 4)
    assert_array_almost_equal(ax.images[0].get_array()[:,:,:3].mean(), 179.79593258881636)
    assert_array_almost_equal(ax.images[0].get_array().mean(), 198.596949)
    # Test local source warping
    _ = ctx.bounds2raster(x1, y1, x2, y2, "./test2.tif", ll=True)
    f, ax = matplotlib.pyplot.subplots(1)
    ax.set_xlim(x1, x2)
    ax.set_ylim(y1, y2)
    ctx.add_basemap(
        ax, source="./test2.tif", crs={"init": "epsg:4326"}, attribution=None
    )
    assert ax.get_xlim() == (x1, x2)
    assert ax.get_ylim() == (y1, y2)

    assert ax.images[0].get_array()[:,:,:3].sum() == 464536503
    assert ax.images[0].get_array().shape == (980, 862, 4)
    assert_array_almost_equal(ax.images[0].get_array()[:,:,:3].mean(), 183.301175)

    assert ax.images[0].get_array().sum() == 678981558
    assert_array_almost_equal(ax.images[0].get_array().mean(), 200.939189)

    # Restore the original Web Mercator bbox for any following checks.
    x1, x2, y1, y2 = [
        -11740727.544603072,
        -11701591.786121061,
        4852834.0517692715,
        4891969.810251278,
    ]
Beispiel #24
0
#leg.set_bbox_to_anchor((1.1,0.5,0.1,0.5))                          # Adjusted numbers to find the best location and size of the legend
#%%
#plot with background map
# Choropleth of gas-leak reports per census tract, reprojected to Web
# Mercator so the contextily basemap tiles line up.
Total = join['NumberOfReports'].sum()
df = join.to_crs(epsg=3857)
ax = df.plot(column='NumberOfReports',
             cmap='Greens',
             figsize=(10, 8),
             alpha=0.8,
             legend=True)
ax.set_title(label='Number of Gas Leak Reports per Census Tract (' +
             'January' + ')' + '( Total number of reports: ' + str(Total) +
             ')',
             fontdict={'fontsize': 16},
             loc='center')
ctx.add_basemap(ax, zoom=12)

# %%
#print(us_tract['COUNTYFP'])

#nyc_county = ['005','047','061','081','085']
#nyc_tract = us_tract[(us_tract['COUNTYFP'] == nyc_county[0]) | (us_tract['COUNTYFP'] == nyc_county[1]) | (us_tract['COUNTYFP'] == nyc_county[2]) | (us_tract['COUNTYFP'] == nyc_county[3]) |( us_tract['COUNTYFP'] == nyc_county[4])]

# Count census tracts whose Geoid has no match in this month's data.
# NOTE(review): the loop runs over len(Month_data) but indexes
# us_tract['Geoid'][row] -- confirm the two frames have the same length.
count = 0
for row in range(0, len(Month_data)):
    ind = Month_data.loc[Month_data['Geoid'] == us_tract['Geoid'][row]].index
    # BUG FIX: `is 0` compares object identity and only worked by accident
    # of CPython's small-int caching (SyntaxWarning on Python 3.8+); use an
    # equality comparison instead.
    if len(ind) == 0:
        #print(us_tract['Geoid'][row])
        count += 1

print(count)
Beispiel #25
0
def plot_coverage_grid(
    lad20cd,
    grid_cov,
    crs=None,
    threshold=0.005,
    alpha=0.75,
    ax=None,
    legend=True,
    title="",
    figsize=(15, 15),
    vmin=0,
    vmax=1,
    save_path=None,
    cmap="viridis",
):
    """Generate a square grid of points and show them on a map coloured by
    coverage due to the closest sensor to each grid point.

    Arguments:
        lad20cd {str} -- local authority district code used to look up the
        output-area shapes drawn underneath the grid.
        grid_cov {GeoDataFrame} -- Grid squares and coverage as calculated by
        utils.coverage_grid.

    Keyword Arguments:
        crs {dict} -- coordinate reference system of sensor locations, or
        None for the default (default: {{'init': 'epsg:27700'}})
        threshold {float} -- only plot grid points with coverage above this
        value (default: {0.005})
        alpha {float} -- transparency of grid points (default: {0.75})
        ax {matplotlib axis} -- axis to plot on, or None to create a new
        figure (default: {None})
        legend {bool} -- whether to add a coverage colourbar (default: {True})
        title {str} -- plot title (default: {""})
        figsize {tuple} -- plot figure size (default: {(15, 15)})
        vmin {float} -- min coverage value for colorbar range (default: {0})
        vmax {float} -- max coverage value for colorbar range (default: {1})
        save_path {str} -- path to save output plot or None to not save
        (default: {None})
        cmap {str} -- matplotlib colormap name for coverage
        (default: {"viridis"})

    Returns:
        ax -- matplotlib axis object for the created plot.
    """
    # Avoid a mutable default argument: resolve the CRS default here.
    if crs is None:
        crs = {"init": "epsg:27700"}

    if ax is None:
        ax = plt.figure(figsize=figsize).gca()

    if legend:
        cax = get_color_axis(ax)
        cax.set_title("Coverage")
    else:
        cax = None

    # Draw the output-area outlines underneath the coverage grid.
    oa_shapes = get_oa_shapes(lad20cd)
    oa_shapes.plot(ax=ax, edgecolor="k", facecolor="None")
    # Grid cells with |coverage| <= threshold are made fully transparent.
    grid_cov.plot(
        column="coverage",
        cmap=cmap,
        alpha=[alpha if abs(c) > threshold else 0 for c in grid_cov["coverage"]],
        ax=ax,
        vmin=vmin,
        vmax=vmax,
        cax=cax,
        legend=legend,
    )
    ctx.add_basemap(
        ax, source="http://a.tile.stamen.com/toner/{z}/{x}/{y}.png", crs=crs
    )
    ax.set_axis_off()
    ax.set_title(title, fontsize=20)

    if save_path:
        plt.savefig(save_path, dpi=200)
        plt.close()

    return ax
Beispiel #26
0
# A popular use of rasters is in the context of web tiles, which are a way of quickly obtaining geographical context to present spatial data. In Python, we can use [`contextily`](https://github.com/darribas/contextily) to pull down tiles and display them along with our own geographic data. Let us first import it the package:

# In[85]:

import contextily as cx

# In[88]:

# Inspect the CRS the LSOA data is expressed in (notebook cell output).
lsoas.crs

# We can begin by creating a map in the same way we would do normally, and then use the `add_basemap` command to, er, add a basemap:

# In[86]:

# Plot the polygons and let contextily warp the tiles to the data's CRS.
ax = lsoas.plot(alpha=0.5)
cx.add_basemap(ax, crs=lsoas.crs)

# Note that we need to be explicit when adding the basemap to state the coordinate reference system (`crs`) our data is expressed in, `contextily` will not be able to pick it up otherwise. Conversely, we could change our data's CRS into [Pseudo-Mercator](http://epsg.io/3857), the native reference system for most web tiles:

# In[89]:

# Alternative: reproject the data itself to Web Mercator (EPSG:3857),
# the native CRS of web tiles, so no 'crs' argument is needed.
lsoas_wm = lsoas.to_crs(epsg=3857)
ax = lsoas_wm.plot(alpha=0.5)
cx.add_basemap(ax)

# Note how the coordinates are different but, if we set it right, either approach aligns tiles and data nicely.

# Web tiles can be integrated with other features of maps in a similar way as we have seen above. So, for example, we can change the size of the map, and remove the axis:

# In[90]:
Beispiel #27
0
def plot_optimisation_result(
    result,
    title=None,
    save_path=None,
    ax=None,
    figsize=(10, 10),
    fill_oa=True,
    cmap="YlGn",
    legend=True,
    alpha=0.75,
    sensor_size=36,
    sensor_color="darkgreen",
    sensor_edgecolor="white",
    sensor_linewidth=1.5,
    fontsize=20,
    show=True,
    vmin=0,
    vmax=1,
):
    """
    Plot a map of an optimisation result: sensor locations as scatter points
    (coloured by sensor_color) over output-area polygons shaded by coverage
    (when fill_oa is True), with a web-tile basemap underneath.

    Arguments:
        result {dict} -- optimisation result with keys "sensors",
        "oa_coverage", "lad20cd" and "total_coverage".

    Keyword Arguments:
        title {str} -- plot title, or None to auto-generate one from the
        sensor count and total coverage (default: {None})
        save_path {str} -- path to save the figure, or None to not save
        (default: {None})
        ax {matplotlib axis} -- axis to plot on, or None to create a new
        figure (default: {None})
        figsize {tuple} -- figure size when creating a new figure
        (default: {(10, 10)})
        fill_oa {bool} -- shade output areas by coverage (default: {True})
        cmap {str} -- colormap for the coverage shading (default: {"YlGn"})
        legend {bool} -- add a coverage colourbar (only when fill_oa)
        (default: {True})
        alpha {float} -- transparency of the output-area polygons
        (default: {0.75})
        sensor_size / sensor_color / sensor_edgecolor / sensor_linewidth --
        scatter styling for the sensor markers.
        fontsize {int} -- title font size for the auto title (default: {20})
        show {bool} -- call plt.show() when not saving (default: {True})
        vmin, vmax {float} -- colorbar range for coverage (default: {0}, {1})
    """
    sensors = pd.DataFrame(result["sensors"])
    sensors.set_index("oa11cd", inplace=True)

    oa_coverage = pd.DataFrame(result["oa_coverage"])
    oa_coverage.set_index("oa11cd", inplace=True)

    oa_shapes = get_oa_shapes(result["lad20cd"])

    # Align coverage values to the shapes by their shared oa11cd index.
    # NOTE(review): assumes oa_coverage has a single value column so the
    # DataFrame-to-column assignment aligns cleanly -- confirm upstream.
    oa_shapes["coverage"] = oa_coverage

    if ax is None:
        ax = plt.figure(figsize=figsize).gca()

    # to make colorbar same size as graph:
    # https://www.science-emergence.com/Articles/How-to-match-the-colorbar-size-with-the-figure-size-in-matpltolib-/
    if legend and fill_oa:
        cax = get_color_axis(ax)
        cax.set_title("Coverage")
    else:
        cax = None

    if fill_oa:
        ax = oa_shapes.plot(
            column="coverage",
            alpha=alpha,
            cmap=cmap,
            legend=legend,
            ax=ax,
            cax=cax,
            vmin=vmin,
            vmax=vmax,
        )
    else:
        # Invisible polygons: keeps the axis extent matching the OA layer.
        ax = oa_shapes.plot(
            alpha=alpha, ax=ax, facecolor="none", edgecolor="none", linewidth=0.5
        )

    ax.scatter(
        sensors["x"],
        sensors["y"],
        s=sensor_size,
        color=sensor_color,
        edgecolor=sensor_edgecolor,
        linewidth=sensor_linewidth,
    )

    ctx.add_basemap(
        ax,
        source="http://a.tile.stamen.com/toner/{z}/{x}/{y}.png",
        crs=oa_shapes.crs.to_epsg(),
    )

    ax.set_axis_off()
    if title is None:
        ax.set_title(
            "n_sensors = {:.0f}, coverage = {:.2f}".format(
                len(sensors), result["total_coverage"]
            ),
            fontsize=fontsize,
        )
    else:
        ax.set_title(title)

    if save_path:
        plt.tight_layout()
        plt.savefig(save_path, dpi=200)
        plt.close()
    elif show:
        plt.show()
Beispiel #28
0
# Plot gauge/landmark points and watershed boundaries on a topo basemap.
usgs_sg_point.plot(ax=ax, color='red', marker='o', zorder=5, label="USGS SG")
Verde_end_point.plot(ax=ax,
                     color='black',
                     marker="x",
                     zorder=5,
                     label="Verde River End")
Phoenix_point.plot(ax=ax, color='c', marker="P", zorder=5, label="Phoenix, AZ")
# NOTE(review): color=None falls back to the matplotlib default colour;
# presumably edgecolor='black' is what draws the visible outlines.
Watersheds_project.boundary.plot(ax=ax,
                                 color=None,
                                 edgecolor='black',
                                 linewidth=.75,
                                 label="Watershed Boundaries",
                                 zorder=1)
ax.set_title("Verde River Watershed")
# Fix the view window to the watershed extent (map units: metres).
ax.set(ylim=[1.2e6, 1.6e6], xlim=[-1.6e6, -1.3e6])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
ax.set_xlabel('Easting (m)')
ax.set_ylabel('Northing (m)')
# NOTE(review): 'url=' is the pre-1.0 contextily keyword (renamed to
# 'source=' in later releases) -- confirm the installed version.
ctx.add_basemap(ax, url=ctx.providers.OpenTopoMap, crs=gages_AZ.crs)
plt.savefig("Verde_River_Watershed.png")
plt.show()

# %%
# This is to display the image you will get if you download the data
# above and run the code.

# NOTE(review): opens a pre-rendered copy under assets/, not the file
# saved to the working directory above -- verify both paths are intended.
im = Image.open("assets/Verde_River_Watershed.png")
im.show()

# %%
Beispiel #29
0
def makeMap(row):
    """Build and save a choropleth map for one survey variable.

    Reads the variable name and map type from *row* (a mapping with
    "Variable" and "Maptype" keys), aggregates the survey data ``dat`` per
    department ("pdet"), joins the aggregate onto the ``shp`` geometries,
    and saves the figure to ``plots/<VAR>_map.png``. Skips work if the
    output file already exists or the variable is missing from ``dat``.

    Relies on module-level globals: dat, shp, depts, description, codebook,
    lookup, mapsCmap.
    """
    var = row["Variable"].upper()

    plotname = f"plots/{var}_map.png"
    if os.path.exists(plotname):
        print(f"Skipping {plotname} (exists)")
        return

    print(f"Making map for {row['Variable']} with type {row['Maptype']}")

    maptype = row["Maptype"]
    try:
        desc = "\n".join(textwrap.wrap(description[var], 50))
    except KeyError:
        print(f"{var} not in description")
        # Bug fix: 'desc' was left unbound here, raising NameError at
        # plt.title(desc, ...) below; fall back to the variable name.
        desc = var

    try:
        # .copy() so the per-variable transforms below don't mutate 'dat'
        # (and don't trigger SettingWithCopyWarning).
        sub = dat[[var, "pdet"]].copy()
    except KeyError:
        print(f"{var} not found in data")
        return

    leg = ""
    cat = False
    # The aggregation functions must stay lambdas: the agg result column is
    # addressed by the literal name "<lambda>" in the categorical branch.
    if maptype == "satisfaction":
        #fn = lambda x: np.mean([v for v in x if v in [1,2,3,4]])
        fn = lambda x: sum([v in [3, 4] for v in x])
        leg = "\n\n% satisfied"

    elif maptype == "yesno":
        fn = lambda x: sum(x == 1)
        leg = "\n\n% yes"
    elif maptype == "cat":
        # Decode numeric codes to labels via the codebook; -99 marks NaN.
        variableLookup = lookup(cb=codebook, idx=var)
        sub[var] = sub[var].fillna(-99)
        sub[var] = sub[var].astype("int32")
        sub[var] = variableLookup(sub[var])
        fn = lambda x: x.mode()[0]
        cat = True
    else:
        print(f"Unknown type: {maptype}")
        return

    summary = sub.groupby("pdet")
    # Respondent counts per department, used as the percentage denominator.
    c = sub["pdet"].value_counts()

    a = summary[var].agg([fn])
    a = a.merge(c, left_index=True, right_index=True)

    if not cat:
        # Percentage = aggregated count / respondent count per department.
        a["pst"] = (a[a.columns[0]] / a[a.columns[1]]) * 100

    a = shp.merge(a, left_on="pdet", right_index=True)

    plt.clf()

    # Base layer: all departments in grey for context.
    b = depts.plot(figsize=(14, 14),
                   color="#989898",
                   edgecolor="#101010",
                   alpha=0.4)

    if not cat:
        mapPlot = a.plot(
            column="pst",
            ax=b,
            legend=True,
            edgecolor="#606060",
            cmap=ListedColormap(sns.color_palette(
                "RdBu_r")),  #"viridis",#alpha = 0.9,#cmap="rainbow",

            #sns.diverging_palette(220, 20, n=7)
            legend_kwds={"label": leg})
    else:
        mapPlot = a.plot(column="<lambda>",
                         ax=b,
                         cmap=mapsCmap,
                         legend=True,
                         edgecolor="#606060",
                         categorical=True)

    # Pad the x-extent so the legend and labels fit beside the map.
    minx, miny, maxx, maxy = depts.total_bounds
    mapPlot.set_xlim(minx - 200000, maxx + 200000)

    mapPlot.set_axis_off()
    ctx.add_basemap(mapPlot, source=ctx.providers.Esri.WorldPhysical, zoom=8)

    plt.subplots_adjust(top=0.78)
    plt.title(desc, pad=35)
    plt.savefig(plotname)
Beispiel #30
0
def map_top_das(start_date, end_date,
                da_num=20, column='teranet_sales_count',
                minx_coef=1, maxx_coef=1, miny_coef=1, maxy_coef=1,
                display_pin_counts=False):
    """
    v1.0
    function that maps top 'da_num' DAs by values in 'column' on a basemap,
    plots a barchart of these values,
    and displays counts of Teranet records by 'pin' from these top DAs (optional)

    Plots data from the global variable (GeoDataFrame) 'teranet_da_gdf'

    uses another user-defined function 'unique_pins' to analyze 'pin's in the
    subset of Teranet records from top DAs

    ----------------
    Input arguments:  start_date -- string -- start of the period (used in titles only)
                      end_date   -- string -- end of the period (used in titles only)

                      da_num     -- int    -- number of top DAs to be plotted (default=20)

                      column     -- string -- name of the column to be used for sorting
                                              (default='teranet_sales_count')

                      minx_coef  -- float  -- min x coefficient to be used for zooming the map (default=1)
                      maxx_coef  -- float  -- max x coefficient to be used for zooming the map (default=1)
                      miny_coef  -- float  -- min y coefficient to be used for zooming the map (default=1)
                      maxy_coef  -- float  -- max y coefficient to be used for zooming the map (default=1)

                      display_pin_counts -- bool -- option to display counts of Teranet records by pins
                                                    from the subset of Teranet records from top DAs
                                                    (default=False)
    --------
    Returns:     None, plots top DAs with a basemap and a barchart

    -----------------
    Global variables: teranet_da_subset_df -- DataFrame    -- DataFrame with subset of Teranet sales
                                                              generated in steps 1.1, 2.1, 3.1

                      teranet_da_gdf       -- GeoDataFrame -- GeoDataFrame with dissimination areas
                                                              joined with Teranet aggregate
                                                              generated in step 1.2, 2.2, 3.2

                      unique_pins          -- user func    -- function 'unique_pins' used to display
                                                              counts of pins from subset of Teranet
                                                              records from top 'da_num' DAs
                                                              (used if 'display_pin_counts'=True)
    """
    # global variables
    global teranet_da_subset_df, teranet_da_gdf, unique_pins

    # map top DAs by column values with a basemap

    # create a list of indexes of top DAs by 'column' values
    top_da_ids = teranet_da_gdf[column].sort_values(ascending=False)[:da_num].index

    # create figure and axis
    f, ax = plt.subplots(1, figsize=(12, 12))

    # plot top DAs by count of Teranet sales for the time period
    teranet_da_gdf.to_crs(epsg=3857).loc[top_da_ids].plot(ax=ax)

    # annotate each top DA's centroid with its record count
    # (Series.items() replaces iteritems(), which was removed in pandas 2.0)
    for index, centroid in teranet_da_gdf.to_crs(epsg=3857).loc[top_da_ids].centroid.items():
        x, y = centroid.coords[0]
        ax.text(x, y,
                "DA #" + str(index) + ": " + str(teranet_da_gdf.loc[index, column]) + " records")

    # add basemap
    # NOTE(review): 'url=' and 'ctx.sources' are pre-1.0 contextily APIs
    # (now 'source=' / 'ctx.providers') -- confirm the installed version.
    ctx.add_basemap(ax, url=ctx.sources.ST_TONER_BACKGROUND, alpha=0.5)

    # zoom the map
    # NOTE(review): minx/miny are reassigned before being used in the maxx/
    # maxy expressions, so the four updates are order-dependent; with the
    # default coefficients of 1 the limits collapse to a degenerate window.
    # Left as-is pending confirmation of the intended zoom behaviour.
    minx, miny, maxx, maxy = teranet_da_gdf.to_crs(epsg=3857).total_bounds
    minx = minx + (maxx - minx) / minx_coef
    maxx = maxx - (maxx - minx) / maxx_coef
    miny = miny + (maxy - miny) / miny_coef
    maxy = maxy - (maxy - miny) / maxy_coef

    ax.set_xlim(minx, maxx)
    ax.set_ylim(miny, maxy)

    # set axis parameters
    plt.axis('equal')
    ax.set_axis_off()
    ax.set_title("Top {0} DAs by '{1}' of Teranet records\nfrom {2} to {3}"
                 .format(da_num, column, start_date, end_date),
                 fontdict={'fontsize': '16', 'fontweight': '3'})

    plt.show()

    # plot the barchart with values of the top DAs

    # create a new figure and axis
    f, ax = plt.subplots(1, figsize=(6, 8))

    # plot a horizontal barchart, same subset as above, inverse order, for highest values to show on the top
    teranet_da_gdf.loc[top_da_ids, column] \
        .reindex(index=teranet_da_gdf.loc[top_da_ids].index[::-1]) \
        .plot(kind='barh', color='gray', ax=ax)

    # plot the mean as a vertical reference line with a rotated label
    ax.axvline(teranet_da_gdf.loc[top_da_ids, column].mean(), linestyle='--', color='deeppink')
    ax.text(teranet_da_gdf.loc[top_da_ids, column].mean() * 0.9,
            10,
            "Mean of top {0} DAs: {1:.2f}"
            .format(da_num,
                    teranet_da_gdf.loc[top_da_ids, column].mean()),
            rotation=90,
            fontdict={'fontsize': '14', 'fontweight': '3'})

    # set axis parameters
    ax.set_title("Top {0} DAs by count of Teranet records\nfrom {1} to {2}".format(da_num, start_date, end_date),
                 fontdict={'fontsize': '16', 'fontweight': '3'})
    ax.set_ylabel("DA id", fontdict={'fontsize': '14', 'fontweight': '3'})
    ax.set_xlabel("Count of Teranet records", fontdict={'fontsize': '14', 'fontweight': '3'})

    plt.show()

    # display counts of Teranet records per pin from the top DAs (optional)
    if display_pin_counts:
        # create a mask to subset Teranet records -- all records with 'da_id' in the list of top_da_ids
        mask = teranet_da_subset_df['da_id'].isin(top_da_ids)

        # call function 'unique_pins' to display counts of unique pins in the subset of Teranet records from top DAs
        unique_pins(teranet_da_subset_df[mask])